diff --git a/.dockerignore b/.dockerignore index 130ca4c2c..7cae75457 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,6 @@ .venv/ **/aws -# node_modules +node_modules **/node_modules/ dist/ **/build/ diff --git a/.env.example b/.env.example index 29a7259b0..26f6e3a29 100644 --- a/.env.example +++ b/.env.example @@ -56,6 +56,13 @@ LANGFLOW_REMOVE_API_KEYS= # LANGFLOW_REDIS_CACHE_EXPIRE (default: 3600) LANGFLOW_CACHE_TYPE= + +# Set AUTO_LOGIN to false if you want to disable auto login +# and use the login form to login. LANGFLOW_SUPERUSER and LANGFLOW_SUPERUSER_PASSWORD +# must be set if AUTO_LOGIN is set to false +# Values: true, false +LANGFLOW_AUTO_LOGIN= + # Superuser username # Example: LANGFLOW_SUPERUSER=admin LANGFLOW_SUPERUSER= diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d4eaafb95..c78ba4c71 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -14,7 +14,7 @@ on: - "src/backend/**" env: - POETRY_VERSION: "1.7.0" + POETRY_VERSION: "1.8.2" jobs: lint: @@ -22,7 +22,6 @@ jobs: strategy: matrix: python-version: - - "3.9" - "3.10" - "3.11" steps: @@ -32,12 +31,15 @@ jobs: pipx install poetry==$POETRY_VERSION - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 + id: setup-python with: python-version: ${{ matrix.python-version }} cache: poetry - - name: Install dependencies + - name: Install Python dependencies run: | + poetry env use ${{ matrix.python-version }} poetry install + if: ${{ steps.setup-python.outputs.cache-hit != 'true' }} - name: Analysing the code with our lint run: | make lint diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release-base.yml similarity index 86% rename from .github/workflows/pre-release.yml rename to .github/workflows/pre-release-base.yml index 58bdd219e..60872def3 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release-base.yml @@ -1,4 +1,4 @@ -name: pre-release +name: Langflow Base Pre-release 
on: pull_request: @@ -11,7 +11,7 @@ on: workflow_dispatch: env: - POETRY_VERSION: "1.5.1" + POETRY_VERSION: "1.8.2" jobs: if_release: @@ -27,7 +27,7 @@ jobs: python-version: "3.10" cache: "poetry" - name: Build project for distribution - run: make build + run: make build base=true - name: Check Version id: check-version run: | @@ -46,7 +46,7 @@ jobs: env: POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} run: | - poetry publish + make publish base=true - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -61,5 +61,6 @@ jobs: with: context: . push: true - file: ./build_and_push.Dockerfile - tags: logspace/langflow:${{ steps.check-version.outputs.version }} + file: ./build_and_push_base.Dockerfile + tags: | + logspace/langflow:base-${{ steps.check-version.outputs.version }} diff --git a/.github/workflows/pre-release-langflow.yml b/.github/workflows/pre-release-langflow.yml new file mode 100644 index 000000000..57f3b3e1a --- /dev/null +++ b/.github/workflows/pre-release-langflow.yml @@ -0,0 +1,70 @@ +name: Langflow Pre-release + +on: + pull_request: + types: + - closed + branches: + - dev + paths: + - "pyproject.toml" + workflow_dispatch: + workflow_run: + workflows: ["Langflow Base Pre-release"] + types: [completed] + branches: [dev] + +env: + POETRY_VERSION: "1.8.2" + +jobs: + if_release: + if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Install poetry + run: pipx install poetry==$POETRY_VERSION + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: "3.10" + cache: "poetry" + - name: Build project for distribution + run: make build main=true + - name: Check Version + id: check-version + run: | + echo version=$(poetry version --short) >> $GITHUB_OUTPUT + - name: Create Release + uses: ncipollo/release-action@v1 + with: + artifacts: "dist/*" + token: ${{ 
secrets.GITHUB_TOKEN }} + draft: false + generateReleaseNotes: true + prerelease: true + tag: v${{ steps.check-version.outputs.version }} + commit: dev + - name: Publish to PyPI + env: + POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} + run: | + make publish main=true + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: . + push: true + file: ./build_and_push.Dockerfile + tags: | + logspace/langflow:${{ steps.check-version.outputs.version }} diff --git a/.github/workflows/test.yml b/.github/workflows/python_test.yml similarity index 76% rename from .github/workflows/test.yml rename to .github/workflows/python_test.yml index 10ab9b324..a04a208f3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/python_test.yml @@ -15,7 +15,7 @@ on: - "src/backend/**" env: - POETRY_VERSION: "1.5.0" + POETRY_VERSION: "1.8.2" jobs: build: @@ -33,11 +33,15 @@ jobs: run: pipx install poetry==$POETRY_VERSION - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 + id: setup-python with: python-version: ${{ matrix.python-version }} cache: "poetry" - - name: Install dependencies - run: poetry install + - name: Install Python dependencies + run: | + poetry env use ${{ matrix.python-version }} + poetry install + if: ${{ steps.setup-python.outputs.cache-hit != 'true' }} - name: Run unit tests run: | make tests diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 21d8d27eb..8004618f6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,7 +10,7 @@ on: - "pyproject.toml" env: - POETRY_VERSION: "1.5.1" + POETRY_VERSION: "1.8.2" jobs: if_release: diff --git 
a/.github/workflows/typescript_test.yml b/.github/workflows/typescript_test.yml new file mode 100644 index 000000000..a75327785 --- /dev/null +++ b/.github/workflows/typescript_test.yml @@ -0,0 +1,149 @@ +name: Run Frontend Tests + +on: + pull_request: + paths: + - "src/frontend/**" + +env: + POETRY_VERSION: "1.8.2" + NODE_VERSION: "21" + PYTHON_VERSION: "3.10" + # Define the directory where Playwright browsers will be installed. + # Adjust if your project uses a different path. + PLAYWRIGHT_BROWSERS_PATH: "ms-playwright" + +jobs: + setup-and-test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + shardIndex: [1, 2, 3, 4] + shardTotal: [4] + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v3 + id: setup-node + with: + node-version: ${{ env.NODE_VERSION }} + cache: "npm" + + - name: Install Node.js dependencies + run: | + cd src/frontend + npm ci + if: ${{ steps.setup-node.outputs.cache-hit != 'true' }} + + # Attempt to restore the correct Playwright browser binaries based on the + # currently installed version of Playwright (The browser binary versions + # may change with Playwright versions). + # Note: Playwright's cache directory is hard coded because that's what it + # says to do in the docs. There doesn't appear to be a command that prints + # it out for us. + # - uses: actions/cache@v4 + # id: playwright-cache + # with: + # path: ${{ env.PLAYWRIGHT_BROWSERS_PATH }} + # key: "${{ runner.os }}-playwright-${{ hashFiles('src/frontend/package-lock.json') }}" + # # As a fallback, if the Playwright version has changed, try use the + # # most recently cached version. There's a good chance that at least one + # # of the browser binary versions haven't been updated, so Playwright can + # # skip installing that in the next step. + # # Note: When falling back to an old cache, `cache-hit` (used below) + # # will be `false`. 
This allows us to restore the potentially out of + # # date cache, but still let Playwright decide if it needs to download + # # new binaries or not. + # restore-keys: | + # ${{ runner.os }}-playwright- + - name: Cache playwright binaries + uses: actions/cache@v4 + id: playwright-cache + with: + path: | + ~/.cache/ms-playwright + key: ${{ runner.os }}-playwright-${{ hashFiles('src/frontend/package-lock.json') }} + - name: Install Frontend dependencies + run: | + cd src/frontend + npm ci + + - name: Install Playwright's browser binaries + run: | + cd src/frontend + npx playwright install --with-deps + if: steps.playwright-cache.outputs.cache-hit != 'true' + - name: Install Playwright's dependencies + run: | + cd src/frontend + npx playwright install-deps + if: steps.playwright-cache.outputs.cache-hit != 'true' + + # If the Playwright browser binaries weren't able to be restored, we tell + # paywright to install everything for us. + # - name: Install Playwright's dependencies + # if: steps.playwright-cache.outputs.cache-hit != 'true' + # run: npx playwright install --with-deps + + - name: Install Poetry + run: pipx install "poetry==${{ env.POETRY_VERSION }}" + + - name: Set up Python + uses: actions/setup-python@v5 + id: setup-python + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: "poetry" + + - name: Install Python dependencies + run: | + poetry env use ${{ env.PYTHON_VERSION }} + poetry install + if: ${{ steps.setup-python.outputs.cache-hit != 'true' }} + + - name: Run Playwright Tests + run: | + cd src/frontend + npx playwright test --shard=${{ matrix.shardIndex }}/${{ matrix.shardTotal }} + + - name: Upload blob report to GitHub Actions Artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: blob-report-${{ matrix.shardIndex }} + path: src/frontend/blob-report + retention-days: 1 + + merge-reports: + needs: setup-and-test + runs-on: ubuntu-latest + if: always() + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - 
name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: ${{ env.NODE_VERSION }} + + - name: Download blob reports from GitHub Actions Artifacts + uses: actions/download-artifact@v4 + with: + path: all-blob-reports + pattern: blob-report-* + merge-multiple: true + + - name: Merge into HTML Report + run: | + npx playwright merge-reports --reporter html ./all-blob-reports + + - name: Upload HTML report + uses: actions/upload-artifact@v4 + with: + name: html-report--attempt-${{ github.run_attempt }} + path: playwright-report + retention-days: 14 diff --git a/.gitignore b/.gitignore index 744817491..0d74c2208 100644 --- a/.gitignore +++ b/.gitignore @@ -258,5 +258,10 @@ langflow.db /tmp/* src/backend/langflow/frontend/ +src/backend/base/langflow/frontend/ .docker scratchpad* +chroma*/* +stuff/* +src/frontend/playwright-report/index.html +*.bak \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index a8229b155..962599449 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -13,10 +13,32 @@ "7860", "--reload", "--log-level", - "debug" + "debug", + "--loop", + "asyncio" ], "jinja": true, - "justMyCode": true, + "justMyCode": false, + "env": { + "LANGFLOW_LOG_LEVEL": "debug" + }, + "envFile": "${workspaceFolder}/.env" + }, + { + "name": "Debug CLI", + "type": "python", + "request": "launch", + "module": "langflow", + "args": [ + "run", + "--path", + "${workspaceFolder}/src/backend/langflow/frontend" + ], + "jinja": true, + "justMyCode": false, + "env": { + "LANGFLOW_LOG_LEVEL": "debug" + }, "envFile": "${workspaceFolder}/.env" }, { diff --git a/Makefile b/Makefile index 7f0f998ae..e5d886652 100644 --- a/Makefile +++ b/Makefile @@ -1,12 +1,31 @@ .PHONY: all init format lint build build_frontend install_frontend run_frontend run_backend dev help tests coverage all: help +log_level ?= debug +host ?= 0.0.0.0 +port ?= 7860 +env ?= .env +open_browser ?= true +path = src/backend/base/langflow/frontend + +setup_poetry: + 
pipx install poetry + +add: + @echo 'Adding dependencies' +ifdef devel + cd src/backend/base && poetry add --group dev $(devel) +endif + +ifdef main + poetry add $(main) +endif + +ifdef base + cd src/backend/base && poetry add $(base) +endif init: - @echo 'Installing pre-commit hooks' - git config core.hooksPath .githooks - @echo 'Making pre-commit hook executable' - chmod +x .githooks/pre-commit @echo 'Installing backend dependencies' make install_backend @echo 'Installing frontend dependencies' @@ -32,12 +51,15 @@ format: lint: make install_backend - poetry run mypy src/backend/langflow + poetry run mypy --namespace-packages -p "langflow" poetry run ruff . --fix install_frontend: cd src/frontend && npm install +install_frontendci: + cd src/frontend && npm ci + install_frontendc: cd src/frontend && rm -rf node_modules package-lock.json && npm install @@ -47,22 +69,57 @@ run_frontend: tests_frontend: ifeq ($(UI), true) - cd src/frontend && ./run-tests.sh --ui + cd src/frontend && npx playwright test --ui --project=chromium else - cd src/frontend && ./run-tests.sh + cd src/frontend && npx playwright test --project=chromium endif run_cli: - poetry run langflow run --path src/frontend/build + @echo 'Running the CLI' + @make install_frontend > /dev/null + @echo 'Install backend dependencies' + @make install_backend > /dev/null + @echo 'Building the frontend' + @make build_frontend > /dev/null +ifdef env + @make start env=$(env) host=$(host) port=$(port) log_level=$(log_level) +else + @make start host=$(host) port=$(port) log_level=$(log_level) +endif run_cli_debug: - poetry run langflow run --path src/frontend/build --log-level debug + @echo 'Running the CLI in debug mode' + @make install_frontend > /dev/null + @echo 'Building the frontend' + @make build_frontend > /dev/null + @echo 'Install backend dependencies' + @make install_backend > /dev/null +ifdef env + @make start env=$(env) host=$(host) port=$(port) log_level=debug +else + @make start host=$(host) 
port=$(port) log_level=debug +endif + +start: + @echo 'Running the CLI' + +ifeq ($(open_browser),false) + @make install_backend && poetry run langflow run --path $(path) --log-level $(log_level) --host $(host) --port $(port) --env-file $(env) --no-open-browser +else + @make install_backend && poetry run langflow run --path $(path) --log-level $(log_level) --host $(host) --port $(port) --env-file $(env) +endif + + setup_devcontainer: make init make build_frontend poetry run langflow --path src/frontend/build +setup_env: + @sh ./scripts/setup/update_poetry.sh 1.8.2 + @sh ./scripts/setup/setup_env.sh + frontend: make install_frontend make run_frontend @@ -72,38 +129,67 @@ frontendc: make run_frontend install_backend: - poetry install --extras deploy + @echo 'Setting up the environment' + @make setup_env + @echo 'Installing backend dependencies' + @poetry install --extras deploy backend: make install_backend @-kill -9 `lsof -t -i:7860` -ifeq ($(login),1) - @echo "Running backend without autologin"; - poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser --env-file .env +ifdef login + @echo "Running backend autologin is $(login)"; + LANGFLOW_AUTO_LOGIN=$(login) poetry run uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --env-file .env --loop asyncio else - @echo "Running backend with autologin"; - LANGFLOW_AUTO_LOGIN=True poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser --env-file .env + @echo "Running backend respecting the .env file"; + poetry run uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload --env-file .env --loop asyncio endif build_and_run: - echo 'Removing dist folder' + @echo 'Removing dist folder' + @make setup_env rm -rf dist - make build && poetry run pip install dist/*.tar.gz && poetry run langflow run + rm -rf src/backend/base/dist + make build + poetry run pip install dist/*.tar.gz + poetry run langflow run build_and_install: - 
echo 'Removing dist folder' + @echo 'Removing dist folder' rm -rf dist - make build && poetry run pip install dist/*.tar.gz + rm -rf src/backend/base/dist + make build && poetry run pip install dist/*.whl && pip install src/backend/base/dist/*.whl --force-reinstall build_frontend: cd src/frontend && CI='' npm run build - cp -r src/frontend/build src/backend/langflow/frontend + cp -r src/frontend/build src/backend/base/langflow/frontend build: - make install_frontend + @echo 'Building the project' + @make setup_env +ifdef base + make install_frontendci make build_frontend - poetry build --format sdist - rm -rf src/backend/langflow/frontend + make build_langflow_base +endif + +ifdef main + make build_langflow +endif + +build_langflow_base: + cd src/backend/base && poetry build + rm -rf src/backend/base/langflow/frontend + +build_langflow_backup: + poetry lock && poetry build + +build_langflow: + cd ./scripts && poetry run python update_dependencies.py + poetry lock + poetry build + mv pyproject.toml.bak pyproject.toml + mv poetry.lock.bak poetry.lock dev: make install_frontend @@ -115,10 +201,36 @@ else docker compose $(if $(debug),-f docker-compose.debug.yml) up endif -publish: - make build +lock_base: + cd src/backend/base && poetry lock + +lock_langflow: + poetry lock + +lock: +# Run both in parallel + @echo 'Locking dependencies' + cd src/backend/base && poetry lock + poetry lock + +publish_base: + make build_langflow_base + cd src/backend/base && poetry publish + +publish_langflow: + make build_langflow poetry publish +publish: + @echo 'Publishing the project' +ifdef base + -make publish_base +endif + +ifdef main + -make publish_langflow +endif + help: @echo '----' @echo 'format - run code formatters' diff --git a/README.md b/README.md index 1431194e1..b9680aa97 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,7 @@ # ⛓️ Langflow ### Discover a simpler & smarter way to build around Foundation Models + # 
[![Langflow](https://github.com/logspace-ai/langflow/blob/dev/docs/static/img/new_langflow_demo.gif)](https://www.langflow.org) # 📦 Installation @@ -38,11 +39,9 @@ Once you’re done, you can export your flow as a JSON file. Load the flow with: ```python -from langflow import load_flow_from_json +from langflow.load import run_flow_from_json -flow = load_flow_from_json("path/to/flow.json") -# Now you can use it -flow("Hey, have you heard of Langflow?") +results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!") ``` # 🖥️ Command Line Interface (CLI) diff --git a/base.Dockerfile b/base.Dockerfile index 2293c35dd..936943cd1 100644 --- a/base.Dockerfile +++ b/base.Dockerfile @@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \ \ # poetry # https://python-poetry.org/docs/configuration/#using-environment-variables - POETRY_VERSION=1.7.1 \ + POETRY_VERSION=1.8.2 \ # make poetry install to this location POETRY_HOME="/opt/poetry" \ # make poetry create the virtual environment in the project's root diff --git a/build_and_push.Dockerfile b/build_and_push.Dockerfile index f296d83aa..1c595e180 100644 --- a/build_and_push.Dockerfile +++ b/build_and_push.Dockerfile @@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \ \ # poetry # https://python-poetry.org/docs/configuration/#using-environment-variables - POETRY_VERSION=1.7.1 \ + POETRY_VERSION=1.8.2 \ # make poetry install to this location POETRY_HOME="/opt/poetry" \ # make poetry create the virtual environment in the project's root @@ -62,10 +62,14 @@ RUN apt-get update \ WORKDIR /app COPY pyproject.toml poetry.lock ./ COPY src ./src +COPY scripts ./scripts COPY Makefile ./ COPY README.md ./ -RUN curl -sSL https://install.python-poetry.org | python3 - && make build - +RUN --mount=type=cache,target=/root/.cache \ + curl -sSL https://install.python-poetry.org | python3 - +RUN python -m pip install requests && cd ./scripts && python update_dependencies.py +RUN $POETRY_HOME/bin/poetry lock +RUN $POETRY_HOME/bin/poetry build # Final 
stage for the application FROM python-base as final diff --git a/build_and_push_base.Dockerfile b/build_and_push_base.Dockerfile new file mode 100644 index 000000000..f5092c81c --- /dev/null +++ b/build_and_push_base.Dockerfile @@ -0,0 +1,91 @@ + + +# syntax=docker/dockerfile:1 +# Keep this syntax directive! It's used to enable Docker BuildKit + +# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 +# but I try to keep it updated (see history) + +################################ +# PYTHON-BASE +# Sets up all our shared environment variables +################################ +FROM python:3.10-slim as python-base + +# python +ENV PYTHONUNBUFFERED=1 \ + # prevents python creating .pyc files + PYTHONDONTWRITEBYTECODE=1 \ + \ + # pip + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + \ + # poetry + # https://python-poetry.org/docs/configuration/#using-environment-variables + POETRY_VERSION=1.8.2 \ + # make poetry install to this location + POETRY_HOME="/opt/poetry" \ + # make poetry create the virtual environment in the project's root + # it gets named `.venv` + POETRY_VIRTUALENVS_IN_PROJECT=true \ + # do not ask any interactive question + POETRY_NO_INTERACTION=1 \ + \ + # paths + # this is where our requirements + virtual environment will live + PYSETUP_PATH="/opt/pysetup" \ + VENV_PATH="/opt/pysetup/.venv" + + +# prepend poetry and venv to path +ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" + + +################################ +# BUILDER-BASE +# Used to build deps + create our virtual environment +################################ +FROM python-base as builder-base +RUN apt-get update \ + && apt-get install --no-install-recommends -y \ + # deps for installing poetry + curl \ + # deps for building python deps + build-essential \ + # npm + npm \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* + +RUN --mount=type=cache,target=/root/.cache \ + curl -sSL https://install.python-poetry.org | python3 - + 
+# Now we need to copy the entire project into the image +COPY pyproject.toml poetry.lock ./ +COPY src/frontend/package.json /tmp/package.json +RUN cd /tmp && npm install +WORKDIR /app +COPY src/frontend ./src/frontend +RUN rm -rf src/frontend/node_modules +RUN cp -a /tmp/node_modules /app/src/frontend +COPY scripts ./scripts +COPY Makefile ./ +COPY README.md ./ +RUN cd src/frontend && npm run build +COPY src/backend ./src/backend +RUN cp -r src/frontend/build src/backend/base/langflow/frontend +RUN rm -rf src/backend/base/dist +RUN cd src/backend/base && $POETRY_HOME/bin/poetry build --format sdist + +# Final stage for the application +FROM python-base as final + +# Copy virtual environment and built .tar.gz from builder base +COPY --from=builder-base /app/src/backend/base/dist/*.tar.gz ./ + +# Install the package from the .tar.gz +RUN pip install *.tar.gz + +WORKDIR /app +CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"] diff --git a/deploy/base.Dockerfile b/deploy/base.Dockerfile index 323663283..58fae3dab 100644 --- a/deploy/base.Dockerfile +++ b/deploy/base.Dockerfile @@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \ \ # poetry # https://python-poetry.org/docs/configuration/#using-environment-variables - POETRY_VERSION=1.5.1 \ + POETRY_VERSION=1.8.2 \ # make poetry install to this location POETRY_HOME="/opt/poetry" \ # make poetry create the virtual environment in the project's root diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx index d8c6ff2f5..b07f953fa 100644 --- a/docs/docs/components/custom.mdx +++ b/docs/docs/components/custom.mdx @@ -70,7 +70,6 @@ The CustomComponent class serves as the foundation for creating custom component | Key | Description | | -------------------------- | 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | - | _`field_type: str`_ | The type of the field (can be any of the types supported by the _`build`_ method). | | _`is_list: bool`_ | If the field can be a list of values, meaning that the user can manually add more inputs to the same field. | | _`options: List[str]`_ | When defined, the field becomes a dropdown menu where a list of strings defines the options to be displayed. If the _`value`_ attribute is set to one of the options, that option becomes default. For this parameter to work, _`field_type`_ should invariably be _`str`_. | | _`multiline: bool`_ | Defines if a string field opens a text editor. Useful for longer texts. | @@ -78,20 +77,20 @@ The CustomComponent class serves as the foundation for creating custom component | _`display_name: str`_ | Defines the name of the field. | | _`advanced: bool`_ | Hide the field in the canvas view (displayed component settings only). Useful when a field is for advanced users. | | _`password: bool`_ | To mask the input text. Useful to hide sensitive text (e.g. API keys). | - | _`required: bool`_ | Makes the field required. | + | _`required: bool`_ | This is determined automatically but can be used to override the default behavior. | | _`info: str`_ | Adds a tooltip to the field. | | _`file_types: List[str]`_ | This is a requirement if the _`field_type`_ is _file_. Defines which file types will be accepted. For example, _json_, _yaml_ or _yml_. | | _`range_spec: langflow.field_typing.RangeSpec`_ | This is a requirement if the _`field_type`_ is _`float`_. Defines the range of values accepted and the step size. If none is defined, the default is _`[-1, 1, 0.1]`_. | | _`title_case: bool`_ | Formats the name of the field when _`display_name`_ is not defined. 
Set it to False to keep the name as you set it in the _`build`_ method. | + | _`refresh_button: bool`_ | If set to True a button will appear to the right of the field, and when clicked, it will call the _`update_build_config`_ method which takes in the _`build_config`_, the name of the field (_`field_name`_) and the latest value of the field (_`field_value`_). This is useful when you want to update the _`build_config`_ based on the value of the field. | + | _`real_time_refresh: bool`_ | If set to True, the _`update_build_config`_ method will be called every time the field value changes. | + | _`field_type: str`_ | You should never define this key. It is automatically set based on the type hint of the _`build`_ method. | - - - Keys _`options`_ and _`value`_ can receive a method or function that returns a list of strings or a string, respectively. This is useful when you want to dynamically generate the options or the default value of a field. A refresh button will appear next to the field in the component, allowing the user to update the options or the default value. - - - + +By using the _`update_build_config`_ method, you can update the _`build_config`_ in whatever way you want based on the value of the field or not. + - The CustomComponent class also provides helpful methods for specific tasks (e.g., to load and use other flows from the Langflow platform): diff --git a/docs/docs/components/data.mdx b/docs/docs/components/data.mdx new file mode 100644 index 000000000..94620d047 --- /dev/null +++ b/docs/docs/components/data.mdx @@ -0,0 +1,87 @@ +import Admonition from '@theme/Admonition'; + +# Data + +### API Request + +This component makes HTTP requests to the specified URLs. + +**Params** + +- **URLs:** URLs to make requests to. +- **Method:** The HTTP method to use. +- **Headers:** The headers to send with the request. +- **Body:** The body to send with the request (for POST, PATCH, PUT). +- **Timeout:** The timeout to use for the request. + + +

+ Use this component to make HTTP requests to external APIs or services and retrieve data. +

+

+ Ensure that you provide valid URLs and configure the method, headers, body, and timeout appropriately. +

+
+ +--- + +### Directory + +This component recursively loads files from a directory. + +**Params** + +- **Path:** The path to the directory. +- **Types:** File types to load. Leave empty to load all types. +- **Depth:** Depth to search for files. +- **Max Concurrency:** The maximum number of concurrent file loading operations. +- **Load Hidden:** If true, hidden files will be loaded. +- **Recursive:** If true, the search will be recursive. +- **Silent Errors:** If true, errors will not raise an exception. +- **Use Multithreading:** If true, use multithreading for loading files. + + +

+ Use this component to load files from a directory, such as text files, JSON files, etc. +

+

+ Ensure that you provide the correct path to the directory and configure other parameters as needed. +

+
+ + +--- + +### File + +This component loads a generic file. + +**Params** + +- **Path:** The path to the file. +- **Silent Errors:** If true, errors will not raise an exception. + + +

+ Use this component to load a generic file, such as a text file, JSON file, etc. +

+

+ Ensure that you provide the correct path to the file and configure other parameters as needed. +

+
+ +--- + +### URL + +This component fetches content from one or more URLs. + +**Params** + +- **URLs:** The URLs from which content will be fetched. + + +

+ Ensure that you provide valid URLs and configure other parameters as needed. +

+
diff --git a/docs/docs/components/embeddings.mdx b/docs/docs/components/embeddings.mdx index d4ad17542..9a20e5821 100644 --- a/docs/docs/components/embeddings.mdx +++ b/docs/docs/components/embeddings.mdx @@ -2,19 +2,7 @@ import Admonition from "@theme/Admonition"; # Embeddings - -

- We appreciate your understanding as we polish our documentation – it may - contain some rough edges. Share your feedback or report issues to help us - improve! 🛠️📝 -

-
- -Embeddings are vector representations of text that capture the semantic meaning of the text. They are created using text embedding models and allow us to think about the text in a vector space, enabling us to perform tasks like semantic search, where we look for pieces of text that are most similar in the vector space. - ---- - -### BedrockEmbeddings +### Amazon Bedrock Embeddings Used to load [Amazon Bedrocks’s](https://aws.amazon.com/bedrock/) embedding models. @@ -30,7 +18,7 @@ Used to load [Amazon Bedrocks’s](https://aws.amazon.com/bedrock/) embedding mo --- -### CohereEmbeddings +### Cohere Embeddings Used to load [Cohere’s](https://cohere.com/) embedding models. @@ -44,57 +32,93 @@ Used to load [Cohere’s](https://cohere.com/) embedding models. --- -### HuggingFaceEmbeddings +### Azure OpenAI Embeddings + +Generate embeddings using Azure OpenAI models. + +**Params** + +- **Azure Endpoint:** Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/` +- **Deployment Name:** The name of the deployment. +- **API Version:** The API version to use. (Options: 2022-12-01, 2023-03-15-preview, 2023-05-15, 2023-06-01-preview, 2023-07-01-preview, 2023-08-01-preview) +- **API Key:** The API key to access the Azure OpenAI service. + +--- + +### Hugging Face API Embeddings + +Generate embeddings using Hugging Face Inference API models. + +**Params** + +- **API Key:** API key for accessing the Hugging Face Inference API. (Type: str) +- **API URL:** URL of the Hugging Face Inference API. (Default: http://localhost:8080) +- **Model Name:** Name of the model to use. (Default: BAAI/bge-large-en-v1.5) +- **Cache Folder:** Folder path to cache Hugging Face models. (Advanced) +- **Encode Kwargs:** Additional arguments for the encoding process. (Type: dict, Advanced) +- **Model Kwargs:** Additional arguments for the model. (Type: dict, Advanced) +- **Multi Process:** Whether to use multiple processes. 
(Default: False, Advanced) + +--- + +### Hugging Face Embeddings Used to load [HuggingFace’s](https://huggingface.co) embedding models. **Params** -- **cache_folder:** Used to specify the folder where the embeddings will be cached. When embeddings are computed for a text, they can be stored in the cache folder so that they can be reused later without the need to recompute them. This can improve the performance of the application by avoiding redundant computations. - -- **encode_kwargs:** Used to pass additional keyword arguments to the encoding method of the underlying HuggingFace model. These keyword arguments can be used to customize the encoding process, such as specifying the maximum length of the input sequence or enabling truncation or padding. - -- **model_kwargs:** Used to customize the behavior of the model, such as specifying the model architecture, the tokenizer, or any other model-specific configuration options. By using `model_kwargs`, the user can configure the HuggingFace model according to specific needs and preferences. - -- **model_name:** Used to specify the name or identifier of the HuggingFace model that will be used for generating embeddings. It allows users to choose a specific pre-trained model from the Hugging Face model hub — defaults to `sentence-transformers/all-mpnet-base-v2`. +- **Cache Folder:** Folder path to cache HuggingFace models. +- **Encode Kwargs:** Additional arguments for the encoding process. (Type: dict) +- **Model Kwargs:** Additional arguments for the model. (Type: dict) +- **Model Name:** Name of the HuggingFace model to use. (Default: sentence-transformers/all-mpnet-base-v2) +- **Multi Process:** Whether to use multiple processes. (Default: False) --- -### OpenAIEmbeddings +### Ollama Embeddings + +Generate embeddings using Ollama models. + +**Params** + +- **Ollama Model:** Name of the Ollama model to use. (Default: llama2) +- **Ollama Base URL:** Base URL of the Ollama API. 
(Default: http://localhost:11434) +- **Model Temperature:** Temperature parameter for the model. (Type: float) + +--- + +### OpenAI Embeddings Used to load [OpenAI’s](https://openai.com/) embedding models. **Params** -- **chunk_size:** Determines the maximum size of each chunk of text that is processed for embedding. If any of the incoming text chunks exceeds `chunk_size` characters, it will be split into multiple chunks of size `chunk_size` or less before being embedded — defaults to `1000`. - -- **deployment:** Used to specify the deployment name or identifier of the text embedding model. It allows the user to choose a specific deployment of the model to use for embedding. When the deployment is provided, this can be useful when the user has multiple deployments of the same model with different configurations or versions — defaults to `text-embedding-ada-002`. - -- **embedding_ctx_length:** This parameter determines the maximum context length for the text embedding model. It specifies the number of tokens that the model considers when generating embeddings for a piece of text — defaults to `8191` (this means that the model will consider up to 8191 tokens when generating embeddings). - -- **max_retries:** Determines the maximum number of times to retry a request if the model provider returns an error from their API — defaults to `6`. - -- **model:** Defines which pre-trained text embedding model to use — defaults to `text-embedding-ada-002`. - -- **openai_api_base:** Refers to the base URL for the Azure OpenAI resource. It is used to configure the API to connect to the Azure OpenAI service. The base URL can be found in the Azure portal under the user Azure OpenAI resource. - -- **openai_api_key:** Is used to authenticate and authorize access to the OpenAI service. - -- **openai_api_type:** Is used to specify the type of OpenAI API being used, either the regular OpenAI API or the Azure OpenAI API. 
This parameter allows the `OpenAIEmbeddings` class to connect to the appropriate API service. - -- **openai_api_version:** Is used to specify the version of the OpenAI API being used. This parameter allows the `OpenAIEmbeddings` class to connect to the appropriate version of the OpenAI API service. - -- **openai_organization:** Is used to specify the organization associated with the OpenAI API key. If not provided, the default organization associated with the API key will be used. - -- **openai_proxy:** Proxy enables better budgeting and cost management for making OpenAI API calls, including more transparency into pricing. - -- **request_timeout:** Used to specify the maximum amount of time, in milliseconds, to wait for a response from the OpenAI API when generating embeddings for a given text. - -- **tiktoken_model_name:** Used to count the number of tokens in documents to constrain them to be under a certain limit. By default, when set to None, this will be the same as the embedding model name. +- **OpenAI API Key:** The API key to use for accessing the OpenAI API. (Type: str) +- **Default Headers:** Default headers for the HTTP requests. (Type: Dict[str, str], Optional) +- **Default Query:** Default query parameters for the HTTP requests. (Type: NestedDict, Optional) +- **Allowed Special:** Special tokens allowed for processing. (Type: List[str], Default: []) +- **Disallowed Special:** Special tokens disallowed for processing. (Type: List[str], Default: ["all"]) +- **Chunk Size:** Chunk size for processing. (Type: int, Default: 1000) +- **Client:** HTTP client for making requests. (Type: Any, Optional) +- **Deployment:** Deployment name for the model. (Type: str, Default: "text-embedding-3-small") +- **Embedding Context Length:** Length of embedding context. (Type: int, Default: 8191) +- **Max Retries:** Maximum number of retries for failed requests. (Type: int, Default: 6) +- **Model:** Name of the model to use. 
(Type: str, Default: "text-embedding-3-small") +- **Model Kwargs:** Additional keyword arguments for the model. (Type: NestedDict, Optional) +- **OpenAI API Base:** Base URL of the OpenAI API. (Type: str, Optional) +- **OpenAI API Type:** Type of the OpenAI API. (Type: str, Optional) +- **OpenAI API Version:** Version of the OpenAI API. (Type: str, Optional) +- **OpenAI Organization:** Organization associated with the API key. (Type: str, Optional) +- **OpenAI Proxy:** Proxy server for the requests. (Type: str, Optional) +- **Request Timeout:** Timeout for the HTTP requests. (Type: float, Optional) +- **Show Progress Bar:** Whether to show a progress bar for processing. (Type: bool, Default: False) +- **Skip Empty:** Whether to skip empty inputs. (Type: bool, Default: False) +- **TikToken Enable:** Whether to enable TikToken. (Type: bool, Default: True) +- **TikToken Model Name:** Name of the TikToken model. (Type: str, Optional) --- -### VertexAIEmbeddings +### VertexAI Embeddings Wrapper around [Google Vertex AI](https://cloud.google.com/vertex-ai) [Embeddings API](https://cloud.google.com/vertex-ai/docs/generative-ai/embeddings/get-text-embeddings). @@ -113,11 +137,3 @@ Vertex AI is a cloud computing platform offered by Google Cloud Platform (GCP). - **top_p:** Tokens are selected from most probable to least until the sum of their probabilities equals the `top_p` value – defaults to `0.95`. - **tuned_model_name:** The name of a tuned model. If provided, model_name is ignored. - **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can help debug and understand the chain's behavior. If set to False, it will suppress the verbose output – defaults to `False`. - -### OllamaEmbeddings - -Used to load [Ollama’s](https://ollama.ai/) embedding models. Wrapper around LangChain's [Ollama API](https://python.langchain.com/docs/integrations/text_embedding/ollama). 
- -**model** The name of the Ollama model to use – defaults to `llama2`. -- **base_url** The base URL for the Ollama API – defaults to `http://localhost:11434`. -- **temperature** Tunes the degree of randomness in text generations. Should be a non-negative value – defaults to `0`. diff --git a/docs/docs/components/experimental.mdx b/docs/docs/components/experimental.mdx new file mode 100644 index 000000000..d81f28ca3 --- /dev/null +++ b/docs/docs/components/experimental.mdx @@ -0,0 +1,250 @@ +import Admonition from '@theme/Admonition'; + +# Experimental + +Experimental components are currently in a beta phase. This means they have undergone initial development and testing but have not yet reached a stable or fully supported status. Users are encouraged to explore these components, provide feedback, and report any issues encountered during their usage. + +### Clear Message History Component + +This component is designed to clear the message history associated with a specific session ID. + +**Beta:** This component is currently in beta. + +**Parameters** + +- **Session ID:** + - **Display Name:** Session ID + - **Info:** The session ID to clear the message history. + +**Usage** + +To use this component, provide the session ID for which you want to clear the message history. + +--- + +### Extract Key From Record + +This component extracts specified keys from a record. + +**Parameters** + +- **Record:** + - **Display Name:** Record + - **Info:** The record from which to extract the keys. + +- **Keys:** + - **Display Name:** Keys + - **Info:** The keys to extract from the record. + +- **Silent Errors:** + - **Display Name:** Silent Errors + - **Info:** If True, errors will not be raised. + - **Advanced:** True + +**Usage** + +To use this component, provide the record from which you want to extract keys, specify the keys to extract, and optionally set whether to raise errors for missing keys. 
+ +--- + +### Flow as Tool + +This component constructs a Tool from a function that runs the loaded Flow. + +**Parameters** + +- **Flow Name:** + - **Display Name:** Flow Name + - **Info:** The name of the flow to run. + - **Options:** List of available flow names. + - **Real-time Refresh:** True + - **Refresh Button:** True + +- **Name:** + - **Display Name:** Name + - **Description:** The name of the tool. + +- **Description:** + - **Display Name:** Description + - **Description:** The description of the tool. + +- **Return Direct:** + - **Display Name:** Return Direct + - **Description:** Return the result directly from the Tool. + - **Advanced:** True + +**Usage** + +To use this component, select the desired flow from the available options, provide a name and description for the tool, and specify whether to return the result directly from the tool. + +--- + +### Listen + +This component listens for a notification. + +**Parameters** + +- **Name:** + - **Display Name:** Name + - **Info:** The name of the notification to listen for. + +**Usage** + +To use this component, specify the name of the notification to listen for. + +--- + +### List Flows + +This component lists all available flows. + +**Usage** + +To use this component, simply call it without any parameters. + +--- + +### Merge Records + +**Parameters** + +- **Records:** + - **Display Name:** Records + +**Usage** + +To use this component, provide a list of records to merge. + +--- + +### Notify + +This component generates a notification to the Get Notified component. + +**Parameters** + +- **Name:** + - **Display Name:** Name + - **Info:** The name of the notification. + +- **Record:** + - **Display Name:** Record + - **Info:** The record to store. + +- **Append:** + - **Display Name:** Append + - **Info:** If True, the record will be appended to the notification. 
+ +**Usage** + +To use this component, specify the name of the notification, provide an optional record to store, and indicate whether to append the record to the notification. + +--- + +### Run Flow + +This component runs a flow. + +**Parameters** + +- **Input Value:** + - **Display Name:** Input Value + - **Multiline:** True + +- **Flow Name:** + - **Display Name:** Flow Name + - **Info:** The name of the flow to run. + - **Options:** List of available flow names. + - **Refresh Button:** True + +- **Tweaks:** + - **Display Name:** Tweaks + - **Info:** Tweaks to apply to the flow. + +**Usage** + +To use this component, provide the input value, specify the flow name to run, and optionally provide tweaks to apply to the flow. + +--- + +### Runnable Executor + +This component executes a runnable. + +**Parameters** + +- **Input Key:** + - **Display Name:** Input Key + - **Info:** The key to use for the input. + +- **Inputs:** + - **Display Name:** Inputs + - **Info:** The inputs to pass to the runnable. + +- **Runnable:** + - **Display Name:** Runnable + - **Info:** The runnable to execute. + +- **Output Key:** + - **Display Name:** Output Key + - **Info:** The key to use for the output. + +**Usage** + +To use this component, specify the input key, provide the inputs to pass to the runnable, select the runnable to execute, and optionally specify the output key. + +--- + +### SQL Executor + +This component executes an SQL query. + +**Parameters** + +- **Database URL:** + - **Display Name:** Database URL + - **Info:** The URL of the database. + +- **Include Columns:** + - **Display Name:** Include Columns + - **Info:** Include columns in the result. + +- **Passthrough:** + - **Display Name:** Passthrough + - **Info:** If an error occurs, return the query instead of raising an exception. + +- **Add Error:** + - **Display Name:** Add Error + - **Info:** Add the error to the result. 
+ +**Usage** + +To use this component, provide the SQL query, specify the database URL, and optionally configure include columns, passthrough, and add error settings. + +--- + +### SubFlow + +This component dynamically generates a component from a flow. The output is a list of records with keys 'result' and 'message'. + +**Parameters** + +- **Input Value:** + - **Display Name:** Input Value + - **Multiline:** True + +- **Flow Name:** + - **Display Name:** Flow Name + - **Info:** The name of the flow to run. + - **Options:** List of available flow names. + - **Real Time Refresh:** True + - **Refresh Button:** True + +- **Tweaks:** + - **Display Name:** Tweaks + - **Info:** Tweaks to apply to the flow. + +**Usage** + +To use this component, specify the flow name and provide any necessary tweaks to apply to the flow. diff --git a/docs/docs/components/helpers.mdx b/docs/docs/components/helpers.mdx new file mode 100644 index 000000000..d965761aa --- /dev/null +++ b/docs/docs/components/helpers.mdx @@ -0,0 +1,127 @@ +import Admonition from '@theme/Admonition'; + +# Helpers + +### Chat Memory + +This component retrieves stored chat messages given a specific Session ID. + +**Params** + +- **Sender Type:** Choose the sender type from options like "Machine", "User", or "Machine and User". +- **Sender Name:** (Optional) The name of the sender. +- **Number of Messages:** Number of messages to retrieve. +- **Session ID:** The Session ID of the chat history. +- **Order:** Choose the order of the messages, either "Ascending" or "Descending". +- **Record Template:** (Optional) Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key. + +--- + +### Combine Text + +This component concatenates two text sources into a single text chunk using a specified delimiter. + +**Params** + +- **First Text:** The first text input to concatenate. +- **Second Text:** The second text input to concatenate. 
+- **Delimiter:** A string used to separate the two text inputs. Defaults to a whitespace. + +--- + +### Create Record + +This component dynamically creates a Record with a specified number of fields. + +**Params** + +- **Number of Fields:** Number of fields to be added to the record. +- **Text Key:** Key to be used as text. + +--- + +### Custom Component + +Use this component as a template to create your own custom component. + +**Params** + +- **Parameter:** Describe the purpose of this parameter. + + +

+ Customize the build_config and build methods according to your requirements. +

+
+ +Learn more about [Custom Component](http://docs.langflow.org/components/custom). + +--- + +### Documents to Records + +Convert LangChain Documents into Records. + +**Parameters** + +- **Documents:** Documents to be converted into Records. + +--- + +### ID Generator + +Generates a unique ID. + +**Parameters** + +- **Value:** Unique ID generated. + +--- + +### Message History + +Retrieves stored chat messages given a specific Session ID. + +**Parameters** + +- **Sender Type:** Options for the sender type. +- **Sender Name:** Sender name. +- **Number of Messages:** Number of messages to retrieve. +- **Session ID:** Session ID of the chat history. +- **Order:** Order of the messages. + +--- + +### Records to Text + +Convert Records into plain text following a specified template. + +**Parameters** + +- **Records:** The records to convert to text. +- **Template:** The template to use for formatting the records. It can contain the keys `{text}`, `{data}` or any other key in the Record. + +--- + +### Split Text + +Split text into chunks of a specified length. + +**Parameters** + +- **Texts:** Texts to split. +- **Separators:** The characters to split on. Defaults to [" "]. +- **Max Chunk Size:** The maximum length (in number of characters) of each chunk. +- **Chunk Overlap:** The amount of character overlap between chunks. +- **Recursive:** Whether to split recursively. + +--- + +### Update Record + +Update Record with text-based key/value pairs, similar to updating a Python dictionary. + +**Parameters** + +- **Record:** The record to update. +- **New Data:** The new data to update the record with. 
diff --git a/docs/docs/components/inputs.mdx b/docs/docs/components/inputs.mdx new file mode 100644 index 000000000..fe8804e43 --- /dev/null +++ b/docs/docs/components/inputs.mdx @@ -0,0 +1,164 @@ +import Admonition from "@theme/Admonition"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# Inputs + +### Chat Input + +This component is designed to get user input from the chat. + +**Params** + +- **Sender Type:** specifies the sender type. Defaults to _`"User"`_. Options are _`"Machine"`_ and _`"User"`_. + +- **Sender Name:** specifies the name of the sender. Defaults to _`"User"`_. + +- **Message:** specifies the message text. It is a multiline text input. + +- **Session ID:** specifies the session ID of the chat history. If provided, the message will be saved in the Message History. + + +

+ If _`As Record`_ is _`true`_ and the _`Message`_ is a _`Record`_, the data + of the _`Record`_ will be updated with the _`Sender`_, _`Sender Name`_, and + _`Session ID`_. +

+
+ +When you get it from the sidebar, it will look like the image below but that is because some fields are in the advanced section. + + + +If you expose all its fields, it will look like the image below. + + + +One key capability of the Chat Input component is how it transforms the Interaction Panel into a chat window. This feature is particularly useful for scenarios where user input is required to initiate or influence the flow. + + + +--- + +### Prompt + +Create a prompt template with dynamic variables. This is a very useful component for structuring prompts and passing dynamic data to a language model. + +**Parameters** + +- **Template:** the template for the prompt. This field allows you to create other fields dynamically by using curly brackets `{}`. For example, if you have a template like this: _`"Hello {name}, how are you?"`_, a new field called _`name`_ will be created. + + +

+ Prompt variables can be created with any chosen name inside curly brackets, + e.g. `{variable_name}` +

+
+ +Here is how it looks when you get it from the sidebar. + + + +And here when you add a Template with the value _`Hello {name}, how are you?`_. + + + +--- + +### Text Input + +This component is designed for simple text input, allowing users to pass textual data to subsequent components in the workflow. It's particularly useful for scenarios where a brief user input is required to initiate or influence the flow. + +**Params** + +- **Value:** Specifies the text input value. This is where the user can input the text data that will be passed to the next component in the sequence. If no value is provided, it defaults to an empty string. +- **Record Template:** Specifies how a Record should be converted into Text. + + +

+ The `TextInput` component serves as a straightforward means for setting Text + input values in the chat window. It ensures that textual data can be + seamlessly passed to subsequent components in the flow. +

+
+ +It should look like this when dropped directly from the sidebar. + + + +And when you expose all its fields, it will look like the image below. + +The **Record Template** field is used to specify how a Record should be converted into Text. This is particularly useful when you want to extract specific information from a Record and pass it as text to the next component in the sequence. + +For example, if you have a Record with the following structure: + +```json +{ + "name": "John Doe", + "age": 30, + "email": "johndoe@email.com" +} +``` + +You can use a template like this: _`"Name: {name}, Age: {age}"`_ to convert the Record into a text string like this: _`"Name: John Doe, Age: 30"`_, and if you pass more than one Record, the text will be concatenated with a new line separator. + + + +The Text Input component gives you the possibility to add an Input field on the Interaction Panel. This is useful because it allows you to define parameters while running and testing your flow. + + diff --git a/docs/docs/components/memories.mdx b/docs/docs/components/memories.mdx index 3bf9a957c..b92538134 100644 --- a/docs/docs/components/memories.mdx +++ b/docs/docs/components/memories.mdx @@ -12,6 +12,26 @@ Memory is a concept in chat-based applications that allows the system to remembe --- +### MessageHistory + +This component is designed to retrieve stored messages based on various filters such as sender type, sender name, session ID, and a specific file path where messages are stored. It allows for a flexible retrieval of chat history, providing insights into past interactions. + +**Params** + +- **Sender Type:** (Optional) Specifies the type of the sender. Options are _`"Machine"`_, _`"User"`_, or _`"Machine and User"`_. Filters the messages by the type of the sender. + +- **Sender Name:** (Optional) Specifies the name of the sender. Filters the messages by the name of the sender. + +- **Session ID:** (Optional) Specifies the session ID of the chat history. 
Filters the messages belonging to a specific session. + +- **Number of Messages:** Specifies the number of messages to retrieve. Defaults to _`5`_. Determines how many recent messages from the chat history to fetch. + + +

+ The component retrieves messages based on the provided criteria, including the specific file path for stored messages. If no specific criteria are provided, it will return the most recent messages up to the specified limit. This component can be used to review past interactions and analyze the flow of conversations. +

+
+ ### ConversationBufferMemory The `ConversationBufferMemory` component is a type of memory system that plainly stores the last few inputs and outputs of a conversation. @@ -27,7 +47,7 @@ The `ConversationBufferMemory` component is a type of memory system that plainly ### ConversationBufferWindowMemory -`ConversationBufferWindowMemory` is a variation of the `ConversationBufferMemory` that maintains a list of the recent interactions in a conversation. It only keeps the last K interactions in memory, which can be useful for maintaining a sliding window of the most recent interactions without letting the buffer get too large. +`ConversationBufferWindowMemory` is a variation of the `ConversationBufferMemory` that maintains a list of the recent interactions in a conversation. It only keeps the last K interactions in memory, which can be useful for maintaining a sliding window of the most recent interactions without letting the buffer get too large. **Params** @@ -72,7 +92,7 @@ The `ConversationEntityMemory` component incorporates intricate memory structure ### ConversationSummaryMemory -The `ConversationSummaryMemory` is a memory component that creates a summary of the conversation over time. It condenses information from the conversation and stores the current summary in memory. It is particularly useful for longer conversations where keeping the entire message history in the prompt would take up too many tokens. +The `ConversationSummaryMemory` is a memory component that creates a summary of the conversation over time. It condenses information from the conversation and stores the current summary in memory. It is particularly useful for longer conversations where keeping the entire message history in the prompt would take up too many tokens. 
**Params** diff --git a/docs/docs/components/llms.mdx b/docs/docs/components/model_specs.mdx similarity index 100% rename from docs/docs/components/llms.mdx rename to docs/docs/components/model_specs.mdx diff --git a/docs/docs/components/models.mdx b/docs/docs/components/models.mdx new file mode 100644 index 000000000..1c3b404b5 --- /dev/null +++ b/docs/docs/components/models.mdx @@ -0,0 +1,346 @@ +import Admonition from '@theme/Admonition'; + +# Models + +### Amazon Bedrock + +This component facilitates the generation of text using the LLM (Large Language Model) model from Amazon Bedrock. + +**Params** + +- **Input Value:** Specifies the input text for text generation. + +- **System Message (Optional):** A system message to pass to the model. + +- **Model ID (Optional):** Specifies the model ID to be used for text generation. Defaults to _`"anthropic.claude-instant-v1"`_. Available options include: + - _`"ai21.j2-grande-instruct"`_ + - _`"ai21.j2-jumbo-instruct"`_ + - _`"ai21.j2-mid"`_ + - _`"ai21.j2-mid-v1"`_ + - _`"ai21.j2-ultra"`_ + - _`"ai21.j2-ultra-v1"`_ + - _`"anthropic.claude-instant-v1"`_ + - _`"anthropic.claude-v1"`_ + - _`"anthropic.claude-v2"`_ + - _`"cohere.command-text-v14"`_ + +- **Credentials Profile Name (Optional):** Specifies the name of the credentials profile. + +- **Region Name (Optional):** Specifies the region name. + +- **Model Kwargs (Optional):** Additional keyword arguments for the model. + +- **Endpoint URL (Optional):** Specifies the endpoint URL. + +- **Streaming (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **Cache (Optional):** Specifies whether to cache the response. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + + +

+ Ensure that necessary credentials are provided to connect to the Amazon Bedrock API. If connection fails, a ValueError will be raised. +

+
+ + +--- + +### Anthropic + +This component allows the generation of text using Anthropic Chat&Completion large language models. + +**Params** + +- **Model Name:** Specifies the name of the Anthropic model to be used for text generation. Available options include: + - _`"claude-2.1"`_ + - _`"claude-2.0"`_ + - _`"claude-instant-1.2"`_ + - _`"claude-instant-1"`_ + +- **Anthropic API Key:** Your Anthropic API key. + +- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to _`256`_. + +- **Temperature (Optional):** Specifies the sampling temperature. Defaults to _`0.7`_. + +- **API Endpoint (Optional):** Specifies the endpoint of the Anthropic API. Defaults to _`"https://api.anthropic.com"`_ if not specified. + +- **Input Value:** Specifies the input text for text generation. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** A system message to pass to the model. + +For detailed documentation and integration guides, please refer to the [Anthropic Component Documentation](https://python.langchain.com/docs/integrations/chat/anthropic). + +--- + +### Azure OpenAI + +This component allows the generation of text using the LLM (Large Language Model) model from Azure OpenAI. + +**Params** + +- **Model Name:** Specifies the name of the Azure OpenAI model to be used for text generation. Available options include: + - _`"gpt-35-turbo"`_ + - _`"gpt-35-turbo-16k"`_ + - _`"gpt-35-turbo-instruct"`_ + - _`"gpt-4"`_ + - _`"gpt-4-32k"`_ + - _`"gpt-4-vision"`_ + +- **Azure Endpoint:** Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`. + +- **Deployment Name:** Specifies the name of the deployment. + +- **API Version:** Specifies the version of the Azure OpenAI API to be used. 
Available options include: + - _`"2023-03-15-preview"`_ + - _`"2023-05-15"`_ + - _`"2023-06-01-preview"`_ + - _`"2023-07-01-preview"`_ + - _`"2023-08-01-preview"`_ + - _`"2023-09-01-preview"`_ + - _`"2023-12-01-preview"`_ + +- **API Key:** Your Azure OpenAI API key. + +- **Temperature (Optional):** Specifies the sampling temperature. Defaults to _`0.7`_. + +- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to _`1000`_. + +- **Input Value:** Specifies the input text for text generation. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** A system message to pass to the model. + +For detailed documentation and integration guides, please refer to the [Azure OpenAI Component Documentation](https://python.langchain.com/docs/integrations/llms/azure_openai). + + +--- + +### Cohere + +This component enables text generation using Cohere large language models. + +**Params** + +- **Cohere API Key:** Your Cohere API key. + +- **Max Tokens (Optional):** Specifies the maximum number of tokens to generate. Defaults to _`256`_. + +- **Temperature (Optional):** Specifies the sampling temperature. Defaults to _`0.75`_. + +- **Input Value:** Specifies the input text for text generation. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** A system message to pass to the model. + +--- + +### Google Generative AI + +This component enables text generation using Google Generative AI. + +**Params** + +- **Google API Key:** Your Google API key to use for the Google Generative AI. + +- **Model:** The name of the model to use. Supported examples are _`"gemini-pro"`_ and _`"gemini-pro-vision"`_. + +- **Max Output Tokens (Optional):** The maximum number of tokens to generate. + +- **Temperature:** Run inference with this temperature. 
Must be in the closed interval [0.0, 1.0]. + +- **Top K (Optional):** Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive. + +- **Top P (Optional):** The maximum cumulative probability of tokens to consider when sampling. + +- **N (Optional):** Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated. + +- **Input Value:** The input to the model. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** A system message to pass to the model. + +--- + +### Hugging Face API + +This component facilitates text generation using LLM models from the Hugging Face Inference API. + +**Params** + +- **Endpoint URL:** The URL of the Hugging Face Inference API endpoint. Should be provided along with necessary authentication credentials. + +- **Task:** Specifies the task for text generation. Options include _`"text2text-generation"`_, _`"text-generation"`_, and _`"summarization"`_. + +- **API Token:** The API token required for authentication with the Hugging Face Hub. + +- **Model Keyword Arguments (Optional):** Additional keyword arguments for the model. Should be provided as a Python dictionary. + +- **Input Value:** The input text for text generation. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** A system message to pass to the model. + +--- + +### LiteLLM Model + +Generates text using the `LiteLLM` collection of large language models. + +**Parameters** + +- **Model name:** The name of the model to use. For example, `gpt-3.5-turbo`. (Type: str) +- **API key:** The API key to use for accessing the provider's API. (Type: str, Optional) +- **Provider:** The provider of the API key. 
(Type: str, Choices: "OpenAI", "Azure", "Anthropic", "Replicate", "Cohere", "OpenRouter") +- **Temperature:** Controls the randomness of the text generation. (Type: float, Default: 0.7) +- **Model kwargs:** Additional keyword arguments for the model. (Type: Dict, Optional) +- **Top p:** Filter responses to keep the cumulative probability within the top p tokens. (Type: float, Optional) +- **Top k:** Filter responses to only include the top k tokens. (Type: int, Optional) +- **N:** Number of chat completions to generate for each prompt. (Type: int, Default: 1) +- **Max tokens:** The maximum number of tokens to generate for each chat completion. (Type: int, Default: 256) +- **Max retries:** Maximum number of retries for failed requests. (Type: int, Default: 6) +- **Verbose:** Whether to print verbose output. (Type: bool, Default: False) +- **Input:** The input prompt for text generation. (Type: str) +- **Stream:** Whether to stream the output. (Type: bool, Default: False) +- **System message:** System message to pass to the model. (Type: str, Optional) + +--- + +### Ollama + +Generate text using Ollama Local LLMs. + +**Parameters** + +- **Base URL:** Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified. +- **Model Name:** The model name to use. Refer to [Ollama Library](https://ollama.ai/library) for more models. +- **Temperature:** Controls the creativity of model responses. (Default: 0.8) +- **Cache:** Enable or disable caching. (Default: False) +- **Format:** Specify the format of the output (e.g., json). (Advanced) +- **Metadata:** Metadata to add to the run trace. (Advanced) +- **Mirostat:** Enable/disable Mirostat sampling for controlling perplexity. (Default: Disabled) +- **Mirostat Eta:** Learning rate for Mirostat algorithm. (Default: None) (Advanced) +- **Mirostat Tau:** Controls the balance between coherence and diversity of the output. 
(Default: None) (Advanced) +- **Context Window Size:** Size of the context window for generating tokens. (Default: None) (Advanced) +- **Number of GPUs:** Number of GPUs to use for computation. (Default: None) (Advanced) +- **Number of Threads:** Number of threads to use during computation. (Default: None) (Advanced) +- **Repeat Last N:** How far back the model looks to prevent repetition. (Default: None) (Advanced) +- **Repeat Penalty:** Penalty for repetitions in generated text. (Default: None) (Advanced) +- **TFS Z:** Tail free sampling value. (Default: None) (Advanced) +- **Timeout:** Timeout for the request stream. (Default: None) (Advanced) +- **Top K:** Limits token selection to top K. (Default: None) (Advanced) +- **Top P:** Works together with top-k. (Default: None) (Advanced) +- **Verbose:** Whether to print out response text. +- **Tags:** Tags to add to the run trace. (Advanced) +- **Stop Tokens:** List of tokens to signal the model to stop generating text. (Advanced) +- **System:** System to use for generating text. (Advanced) +- **Template:** Template to use for generating text. (Advanced) +- **Input:** The input text. +- **Stream:** Whether to stream the response. +- **System Message:** System message to pass to the model. (Advanced) + +--- + +### OpenAI + +This component facilitates text generation using OpenAI's models. + +**Params** + +- **Input Value:** The input text for text generation. + +- **Max Tokens (Optional):** The maximum number of tokens to generate. Defaults to _`256`_. + +- **Model Kwargs (Optional):** Additional keyword arguments for the model. Should be provided as a nested dictionary. + +- **Model Name (Optional):** The name of the model to use. Defaults to _`gpt-4-1106-preview`_. Supported options include: _`gpt-4-turbo-preview`_, _`gpt-4-0125-preview`_, _`gpt-4-1106-preview`_, _`gpt-4-vision-preview`_, _`gpt-3.5-turbo-0125`_, _`gpt-3.5-turbo-1106`_. + +- **OpenAI API Base (Optional):** The base URL of the OpenAI API. 
Defaults to _`https://api.openai.com/v1`_. + +- **OpenAI API Key (Optional):** The API key for accessing the OpenAI API. + +- **Temperature:** Controls the creativity of model responses. Defaults to _`0.7`_. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** System message to pass to the model. + +--- + +### Qianfan + +This component facilitates the generation of text using Baidu Qianfan chat models. + +**Params** + +- **Model Name:** Specifies the name of the Qianfan chat model to be used for text generation. Available options include: + - _`"ERNIE-Bot"`_ + - _`"ERNIE-Bot-turbo"`_ + - _`"BLOOMZ-7B"`_ + - _`"Llama-2-7b-chat"`_ + - _`"Llama-2-13b-chat"`_ + - _`"Llama-2-70b-chat"`_ + - _`"Qianfan-BLOOMZ-7B-compressed"`_ + - _`"Qianfan-Chinese-Llama-2-7B"`_ + - _`"ChatGLM2-6B-32K"`_ + - _`"AquilaChat-7B"`_ + +- **Qianfan Ak:** Your Baidu Qianfan access key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop). + +- **Qianfan Sk:** Your Baidu Qianfan secret key, obtainable from [here](https://cloud.baidu.com/product/wenxinworkshop). + +- **Top p (Optional):** Model parameter. Specifies the top-p value. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to _`0.8`_. + +- **Temperature (Optional):** Model parameter. Specifies the sampling temperature. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to _`0.95`_. + +- **Penalty Score (Optional):** Model parameter. Specifies the penalty score. Only supported in ERNIE-Bot and ERNIE-Bot-turbo models. Defaults to _`1.0`_. + +- **Endpoint (Optional):** Endpoint of the Qianfan LLM, required if custom model is used. + +- **Input Value:** Specifies the input text for text generation. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** A system message to pass to the model. 
+ +--- + +### Vertex AI + +The `ChatVertexAI` is a component for generating text using Vertex AI Chat large language models API. + +**Params** + +- **Credentials:** The JSON file containing the credentials for accessing the Vertex AI Chat API. + +- **Project:** The name of the project associated with the Vertex AI Chat API. + +- **Examples (Optional):** List of examples to provide context for text generation. + +- **Location:** The location of the Vertex AI Chat API service. Defaults to _`us-central1`_. + +- **Max Output Tokens:** The maximum number of tokens to generate. Defaults to _`128`_. + +- **Model Name:** The name of the model to use. Defaults to _`chat-bison`_. + +- **Temperature:** Controls the creativity of model responses. Defaults to _`0.0`_. + +- **Input Value:** The input text for text generation. + +- **Top K:** Limits token selection to top K. Defaults to _`40`_. + +- **Top P:** Works together with top-k. Defaults to _`0.95`_. + +- **Verbose:** Whether to print out response text. Defaults to _`False`_. + +- **Stream (Optional):** Specifies whether to stream the response from the model. Defaults to _`False`_. + +- **System Message (Optional):** System message to pass to the model. \ No newline at end of file diff --git a/docs/docs/components/outputs.mdx b/docs/docs/components/outputs.mdx new file mode 100644 index 000000000..6fe3f9d2a --- /dev/null +++ b/docs/docs/components/outputs.mdx @@ -0,0 +1,37 @@ +import Admonition from '@theme/Admonition'; + +# Outputs + +### Chat Output + +This component is designed to send a message to the chat. + +**Params** + +- **Sender Type:** specifies the sender type. Defaults to _`"Machine"`_. Options are _`"Machine"`_ and _`"User"`_. + +- **Sender Name:** specifies the name of the sender. Defaults to _`"AI"`_. + +- **Session ID:** specifies the session ID of the chat history. If provided, the message will be saved in the Message History. + +- **Message:** specifies the message text. + + +

+ If _`As Record`_ is _`true`_ and the _`Message`_ is a _`Record`_, the data of the _`Record`_ will be updated with the _`Sender`_, _`Sender Name`_, and _`Session ID`_. +

+
+ +### Text Output + +This component is designed to display text data to the user. It's particularly useful for scenarios where you don't want to send the text data to the chat, but still want to display it. + +**Params** + +- **Value:** Specifies the text data to be displayed. This is where the text data to be displayed is provided. If no value is provided, it defaults to an empty string. + + +

+ The `TextOutput` component serves as a straightforward means for displaying text data. It lets you inspect textual data at any point in your flow without sending it to the chat.

+
\ No newline at end of file diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx index 3aafc9b96..0b1ad705c 100644 --- a/docs/docs/components/prompts.mdx +++ b/docs/docs/components/prompts.mdx @@ -21,7 +21,7 @@ The `PromptTemplate` component allows users to create prompts and define variabl Once a variable is defined in the prompt template, it becomes a component input of its own. Check out [Prompt - Customization](../docs/guidelines/prompt-customization.mdx) to learn more. + Customization](../guidelines/prompt-customization) to learn more. - **template:** Template used to format an individual request. diff --git a/docs/docs/components/utilities.mdx b/docs/docs/components/utilities.mdx index 593864213..e8c2ba216 100644 --- a/docs/docs/components/utilities.mdx +++ b/docs/docs/components/utilities.mdx @@ -74,3 +74,23 @@ Build a Document containing a JSON object using a key and another Document page **Output** - **List of Documents:** A list containing the Document with the JSON object. + +## Unique ID Generator + +Generates a unique identifier (UUID) for each instance it is invoked, providing a distinct and reliable identifier suitable for a variety of applications. + +**Params** + +- **Value:** This field displays the generated unique identifier (UUID). The UUID is generated dynamically for each instance of the component, ensuring uniqueness across different uses. + +**Output** + +- Returns a unique identifier (UUID) as a string. This UUID is generated using Python's `uuid` module, ensuring that each identifier is unique and can be used as a reliable reference in your application. + + +

+ The Unique ID Generator is crucial for scenarios requiring distinct identifiers, such as session management, transaction tracking, or any context where different instances or entities must be uniquely identified. The generated UUID is provided as a hexadecimal string, offering a high level of uniqueness and security for identification purposes. +

+
+ +For additional information and examples, please consult the [Langflow Components Custom Documentation](http://docs.langflow.org/components/custom). diff --git a/docs/docs/components/vector-stores.mdx b/docs/docs/components/vector-stores.mdx index 133984cda..103d755b4 100644 --- a/docs/docs/components/vector-stores.mdx +++ b/docs/docs/components/vector-stores.mdx @@ -1,9 +1,635 @@ -import Admonition from '@theme/Admonition'; +import Admonition from "@theme/Admonition"; # Vector Stores - +### Astra DB + +The `Astra DB` is a component for initializing an Astra DB Vector Store from Records. It facilitates the creation of Astra DB-based vector indexes for efficient document storage and retrieval. + +**Params** + +- **Input:** The input documents or records. + +- **Embedding:** The embedding model used by Astra DB. + +- **Collection Name:** The name of the collection in Astra DB. + +- **Token:** The token for Astra DB. + +- **API Endpoint:** The API endpoint for Astra DB. + +- **Namespace:** The namespace in Astra DB. + +- **Metric:** The metric to use in Astra DB. + +- **Batch Size:** The batch size for Astra DB. + +- **Bulk Insert Batch Concurrency:** The bulk insert batch concurrency for Astra DB. + +- **Bulk Insert Overwrite Concurrency:** The bulk insert overwrite concurrency for Astra DB. + +- **Bulk Delete Concurrency:** The bulk delete concurrency for Astra DB. + +- **Setup Mode:** The setup mode for the vector store. + +- **Pre Delete Collection:** Pre delete collection. + +- **Metadata Indexing Include:** Metadata indexing include. + +- **Metadata Indexing Exclude:** Metadata indexing exclude. + +- **Collection Indexing Policy:** Collection indexing policy. + +

- We appreciate your understanding as we polish our documentation – it may contain some rough edges. Share your feedback or report issues to help us improve! 🛠️📝 + Ensure that the required Astra DB token and API endpoint are properly configured.

-
\ No newline at end of file + +
+ +--- + +### Astra DB Search + +The `Astra DBSearch` is a component for searching an existing Astra DB Vector Store for similar documents. It extends the functionality of the `Astra DB` component to provide efficient document retrieval based on similarity metrics. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Embedding:** The embedding model used by Astra DB. + +- **Collection Name:** The name of the collection in Astra DB. + +- **Token:** The token for Astra DB. + +- **API Endpoint:** The API endpoint for Astra DB. + +- **Namespace:** The namespace in Astra DB. + +- **Metric:** The metric to use in Astra DB. + +- **Batch Size:** The batch size for Astra DB. + +- **Bulk Insert Batch Concurrency:** The bulk insert batch concurrency for Astra DB. + +- **Bulk Insert Overwrite Concurrency:** The bulk insert overwrite concurrency for Astra DB. + +- **Bulk Delete Concurrency:** The bulk delete concurrency for Astra DB. + +- **Setup Mode:** The setup mode for the vector store. + +- **Pre Delete Collection:** Pre delete collection. + +- **Metadata Indexing Include:** Metadata indexing include. + +- **Metadata Indexing Exclude:** Metadata indexing exclude. + +- **Collection Indexing Policy:** Collection indexing policy. + +--- + +### Chroma + +The `Chroma` is a component designed for implementing a Vector Store using Chroma. This component allows users to utilize Chroma for efficient vector storage and retrieval within their language processing workflows. + +**Params** + +- **Collection Name:** The name of the collection. + +- **Persist Directory:** The directory to persist the Vector Store to. + +- **Server CORS Allow Origins (Optional):** The CORS allow origins for the Chroma server. + +- **Server Host (Optional):** The host for the Chroma server. + +- **Server Port (Optional):** The port for the Chroma server. 
+ +- **Server gRPC Port (Optional):** The gRPC port for the Chroma server. + +- **Server SSL Enabled (Optional):** Whether to enable SSL for the Chroma server. + +- **Input:** Input data for creating the Vector Store. + +- **Embedding:** The embeddings to use for the Vector Store. + +For detailed documentation and integration guides, please refer to the [Chroma Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/chroma). + +--- + +### Chroma Search + +The `ChromaSearch` is a component designed for searching a Chroma collection for similar documents. This component integrates with Chroma to facilitate efficient document retrieval based on similarity metrics. + +**Params** + +- **Input:** The input text to search for similar documents. + +- **Search Type:** The type of search to perform ("Similarity" or "MMR"). + +- **Collection Name:** The name of the Chroma collection. + +- **Index Directory:** The directory where the Chroma index is stored. + +- **Embedding:** The embedding model used to vectorize inputs (make sure to use the same as the index). + +- **Server CORS Allow Origins (Optional):** The CORS allow origins for the Chroma server. + +- **Server Host (Optional):** The host for the Chroma server. + +- **Server Port (Optional):** The port for the Chroma server. + +- **Server gRPC Port (Optional):** The gRPC port for the Chroma server. + +- **Server SSL Enabled (Optional):** Whether SSL is enabled for the Chroma server. + +--- + +### FAISS + +The `FAISS` is a component designed for ingesting documents into a FAISS Vector Store. It facilitates efficient document indexing and retrieval using the FAISS library. + +**Params** + +- **Embedding:** The embedding model used to vectorize inputs. + +- **Input:** The input documents to ingest into the FAISS Vector Store. + +- **Folder Path:** The path to save the FAISS index. It will be relative to where Langflow is running. + +- **Index Name:** The name of the FAISS index. 
+ +For detailed documentation and integration guides, please refer to the [FAISS Component Documentation](https://faiss.ai/index.html). + +--- + +### FAISS Search + +The `FAISSSearch` is a component for searching a FAISS Vector Store for similar documents. It enables efficient document retrieval based on similarity metrics using FAISS. + +**Params** + +- **Embedding:** The embedding model used by the FAISS Vector Store. + +- **Folder Path:** The path from which to load the FAISS index. It will be relative to where Langflow is running. + +- **Input:** The input value to search for similar documents. + +- **Index Name:** The name of the FAISS index. + +--- + +### MongoDB Atlas + +The `MongoDBAtlas` is a component used to construct a MongoDB Atlas Vector Search vector store from Records. It facilitates the creation of MongoDB Atlas-based vector stores for efficient document storage and retrieval. + +**Params** + +- **Embedding:** The embedding model used by the MongoDB Atlas Vector Search. + +- **Input:** The input documents or records. + +- **Collection Name:** The name of the collection in the MongoDB Atlas database. + +- **Database Name:** The name of the database in MongoDB Atlas. + +- **Index Name:** The name of the index in MongoDB Atlas. + +- **MongoDB Atlas Cluster URI:** The URI of the MongoDB Atlas cluster. + +- **Search Kwargs:** Additional search arguments for MongoDB Atlas. + + +

 Ensure that `pymongo` is installed to use the MongoDB Atlas Vector Store.

+
+ +--- + +### MongoDB Atlas Search + +The `MongoDBAtlasSearch` is a component for searching a MongoDB Atlas Vector Store for similar documents. It extends the functionality of the MongoDBAtlasComponent to provide efficient document retrieval based on similarity metrics. + +**Params** + +- **Search Type:** The type of search to perform. Options: "Similarity", "MMR". + +- **Input:** The input value to search for. + +- **Embedding:** The embedding model used by the MongoDB Atlas Vector Store. + +- **Collection Name:** The name of the collection in the MongoDB Atlas database. + +- **Database Name:** The name of the database in MongoDB Atlas. + +- **Index Name:** The name of the index in MongoDB Atlas. + +- **MongoDB Atlas Cluster URI:** The URI of the MongoDB Atlas cluster. + +- **Search Kwargs:** Additional search arguments for MongoDB Atlas. + +--- + +### PGVector + +The `PGVector` is a component for implementing a Vector Store using PostgreSQL. It allows users to store and retrieve vectors efficiently within a PostgreSQL database. + +**Params** + +- **Input:** The input value to use for the Vector Store. + +- **Embedding:** The embedding model used by the Vector Store. + +- **PostgreSQL Server Connection String:** The URL for the PostgreSQL server. + +- **Table:** The name of the table in the PostgreSQL database. + +For detailed documentation and integration guides, please refer to the [PGVector Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/pgvector). + + +

+ Ensure that the required PostgreSQL server is accessible and properly + configured. +

+
+ +--- + +### PGVector Search + +The `PGVectorSearch` is a component for searching a PGVector Store for similar documents. It extends the functionality of the PGVectorComponent to provide efficient document retrieval based on similarity metrics. + +**Params** + +- **Input:** The input value to search for. + +- **Embedding:** The embedding model used by the Vector Store. + +- **PostgreSQL Server Connection String:** The URL for the PostgreSQL server. + +- **Table:** The name of the table in the PostgreSQL database. + +- **Search Type:** The type of search to perform (e.g., "Similarity", "MMR"). + +--- + +### Pinecone + +The `Pinecone` is a component used to construct a Pinecone wrapper from Records. It facilitates the creation of Pinecone-based vector indexes for efficient document storage and retrieval. + +**Params** + +- **Input:** The input documents or records. + +- **Embedding:** The embedding model used by Pinecone. + +- **Index Name:** The name of the index in Pinecone. + +- **Namespace:** The namespace in Pinecone. + +- **Pinecone API Key:** The API key for Pinecone. + +- **Pinecone Environment:** The environment for Pinecone. + +- **Search Kwargs:** Additional search keyword arguments for Pinecone. + +- **Pool Threads:** The number of threads to use for Pinecone. + + +

+ Ensure that the required Pinecone API key and environment are properly + configured. +

+
+ +--- + +### Pinecone Search + +The `PineconeSearch` is a component used to search a Pinecone Vector Store for similar documents. It extends the functionality of the `PineconeComponent` to provide efficient document retrieval based on similarity metrics. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Embedding:** The embedding model used by Pinecone. + +- **Index Name:** The name of the index in Pinecone. + +- **Namespace:** The namespace in Pinecone. + +- **Pinecone API Key:** The API key for Pinecone. + +- **Pinecone Environment:** The environment for Pinecone. + +- **Search Kwargs:** Additional search keyword arguments for Pinecone. + +- **Pool Threads:** The number of threads to use for Pinecone. + +--- + +### Qdrant + +The `Qdrant` is a component used to construct a Qdrant wrapper from a list of texts. It allows for efficient similarity search and retrieval operations based on the provided embeddings. + +**Params** + +- **Input:** The input documents or records. + +- **Embedding:** The embedding model used by Qdrant. + +- **API Key:** The API key for Qdrant (password field). + +- **Collection Name:** The name of the collection in Qdrant. + +- **Content Payload Key:** The key for the content payload in the documents (advanced). + +- **Distance Function:** The distance function to use in Qdrant (advanced). + +- **gRPC Port:** The gRPC port for Qdrant (advanced). + +- **Host:** The host for Qdrant (advanced). + +- **HTTPS:** Enable HTTPS for Qdrant (advanced). + +- **Location:** The location for Qdrant (advanced). + +- **Metadata Payload Key:** The key for the metadata payload in the documents (advanced). + +- **Path:** The path for Qdrant (advanced). + +- **Port:** The port for Qdrant (advanced). + +- **Prefer gRPC:** Prefer gRPC for Qdrant (advanced). + +- **Prefix:** The prefix for Qdrant (advanced). 
+ +- **Search Kwargs:** Additional search keyword arguments for Qdrant (advanced). + +- **Timeout:** The timeout for Qdrant (advanced). + +- **URL:** The URL for Qdrant (advanced). + +--- + +### Qdrant Search + +The `QdrantSearch` is a component used to search a Qdrant Vector Store for similar documents. It extends the functionality of the `QdrantComponent` to provide efficient document retrieval based on similarity metrics. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Embedding:** The embedding model used by Qdrant. + +- **API Key:** The API key for Qdrant (password field). + +- **Collection Name:** The name of the collection in Qdrant. + +- **Content Payload Key:** The key for the content payload in the documents (advanced). + +- **Distance Function:** The distance function to use in Qdrant (advanced). + +- **gRPC Port:** The gRPC port for Qdrant (advanced). + +- **Host:** The host for Qdrant (advanced). + +- **HTTPS:** Enable HTTPS for Qdrant (advanced). + +- **Location:** The location for Qdrant (advanced). + +- **Metadata Payload Key:** The key for the metadata payload in the documents (advanced). + +- **Path:** The path for Qdrant (advanced). + +- **Port:** The port for Qdrant (advanced). + +- **Prefer gRPC:** Prefer gRPC for Qdrant (advanced). + +- **Prefix:** The prefix for Qdrant (advanced). + +- **Search Kwargs:** Additional search keyword arguments for Qdrant (advanced). + +- **Timeout:** The timeout for Qdrant (advanced). + +- **URL:** The URL for Qdrant (advanced). + +--- + +### Redis + +The `Redis` is a component for implementing a Vector Store using Redis. It provides functionality to store and retrieve vectors efficiently from a Redis database. + +**Params** + +- **Index Name:** The name of the index in Redis (default: your_index). + +- **Input:** The input data to build the Redis Vector Store (input types: Document, Record). 
+ +- **Embedding:** The embedding model used by Redis. + +- **Schema:** The schema file (.yaml) to define the structure of the documents (optional). + +- **Redis Server Connection String:** The connection string for the Redis server. + +- **Redis Index:** The name of the Redis index (optional). + +For detailed documentation, please refer to the [Redis Documentation](https://python.langchain.com/docs/integrations/vectorstores/redis). + + +

+ Ensure that the required Redis server connection URL and index name are + properly configured. If no documents are supplied, a schema must be + provided.

+
+ +--- + +### Redis Search + +The `RedisSearch` is a component for searching a Redis Vector Store for similar documents. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Index Name:** The name of the index in Redis (default: your_index). + +- **Embedding:** The embedding model used by Redis. + +- **Schema:** The schema file (.yaml) to define the structure of the documents (optional). + +- **Redis Server Connection String:** The connection string for the Redis server. + +- **Redis Index:** The name of the Redis index (optional). + +--- + +### Supabase + +The `Supabase` is a component for initializing a Supabase Vector Store from texts and embeddings. + +**Params** + +- **Input:** The input documents or records. + +- **Embedding:** The embedding model used by Supabase. + +- **Query Name:** The name of the query (optional). + +- **Search Kwargs:** Additional search keyword arguments for Supabase (advanced). + +- **Supabase Service Key:** The service key for Supabase. + +- **Supabase URL:** The URL for the Supabase instance. + +- **Table Name:** The name of the table in Supabase (advanced). + + +

+ Ensure that the required Supabase service key, Supabase URL, and table name + are properly configured. +

+
+ +--- + +### Supabase Search + +The `SupabaseSearch` is a component for searching a Supabase Vector Store for similar documents. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Embedding:** The embedding model used by Supabase. + +- **Query Name:** The name of the query (optional). + +- **Search Kwargs:** Additional search keyword arguments for Supabase (advanced). + +- **Supabase Service Key:** The service key for Supabase. + +- **Supabase URL:** The URL for the Supabase instance. + +- **Table Name:** The name of the table in Supabase (advanced). + +--- + +### Vectara + +The `Vectara` is a component for implementing a Vector Store using Vectara. + +**Params** + +- **Vectara Customer ID:** The customer ID for Vectara. + +- **Vectara Corpus ID:** The corpus ID for Vectara. + +- **Vectara API Key:** The API key for Vectara. + +- **Files Url:** The URL(s) of the file(s) to be used for initializing the Vectara Vector Store (optional). + +- **Input:** The input data to be upserted to the corpus (optional). + +For detailed documentation and integration guides, please refer to the [Vectara Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/vectara). + + +

+ If `inputs` are provided, they will be upserted to the corpus. If + `files_url` is provided, Vectara will process the files from the URLs.

+
+ +--- + +### Vectara Search + +The `VectaraSearch` is a component for searching a Vectara Vector Store for similar documents. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Vectara Customer ID:** The customer ID for Vectara. + +- **Vectara Corpus ID:** The corpus ID for Vectara. + +- **Vectara API Key:** The API key for Vectara. + +- **Files Url:** The URL(s) of the file(s) to be used for initializing the Vectara Vector Store (optional). + +--- + +### Weaviate + +The `Weaviate` is a component for implementing a Vector Store using Weaviate. + +**Params** + +- **Weaviate URL:** The URL of the Weaviate instance (default: http://localhost:8080). + +- **Search By Text:** Boolean indicating whether to search by text (default: False). + +- **API Key:** The API key for authentication (optional). + +- **Index name:** The name of the index in Weaviate (optional). + +- **Text Key:** The key used to extract text from documents (default: "text"). + +- **Input:** The input document or record. + +- **Embedding:** The embedding model used by Weaviate. + +- **Attributes:** Additional attributes to consider during indexing (optional). + +For detailed documentation and integration guides, please refer to the [Weaviate Component Documentation](https://python.langchain.com/docs/integrations/vectorstores/weaviate). + + +

+ Before using the Weaviate Vector Store component, ensure that you have a + Weaviate instance running and accessible at the specified URL. Additionally, + make sure to provide the correct API key for authentication if required. + Adjust the index name, text key, and attributes according to your dataset + and indexing requirements. Finally, ensure that the provided embeddings are + compatible with Weaviate's requirements. +

+
+ +--- + +### Weaviate Search + +The `WeaviateSearch` component facilitates searching a Weaviate Vector Store for similar documents. + +**Params** + +- **Search Type:** The type of search to perform (e.g., Similarity, MMR). + +- **Input Value:** The input value to search for. + +- **Weaviate URL:** The URL of the Weaviate instance (default: http://localhost:8080). + +- **Search By Text:** Boolean indicating whether to search by text (default: False). + +- **API Key:** The API key for authentication (optional). + +- **Index name:** The name of the index in Weaviate (optional). + +- **Text Key:** The key used to extract text from documents (default: "text"). + +- **Embedding:** The embedding model used by Weaviate. + +- **Attributes:** Additional attributes to consider during indexing (optional). diff --git a/docs/docs/components/wrappers.mdx b/docs/docs/components/wrappers.mdx deleted file mode 100644 index 4b1251b60..000000000 --- a/docs/docs/components/wrappers.mdx +++ /dev/null @@ -1,20 +0,0 @@ -import Admonition from '@theme/Admonition'; - -# Wrappers - - -

- We appreciate your understanding as we polish our documentation – it may contain some rough edges. Share your feedback or report issues to help us improve! 🛠️📝 -

-
- - -### TextRequestsWrapper - -This component is designed to work with the Python Requests module, which is a popular tool for making web requests. Used to fetch data from a particular website. - -**Params** - -- **header:** specifies the headers to be included in the HTTP request. Defaults to `{'Authorization': 'Bearer '}`. - - Headers are key-value pairs that provide additional information about the request or the client making the request. They can be used to send authentication credentials, specify the content type of the request, set cookies, and more. They allow the client and the server to communicate additional information beyond the basic request. \ No newline at end of file diff --git a/docs/docs/examples/buffer-memory.mdx b/docs/docs/examples/buffer-memory.mdx index 3167081a5..b196f9031 100644 --- a/docs/docs/examples/buffer-memory.mdx +++ b/docs/docs/examples/buffer-memory.mdx @@ -16,6 +16,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; light: "img/buffer-memory.png", dark: "img/buffer-memory.png", }} + style={{ + width: "80%", + margin: "20px auto", + display: "flex", + justifyContent: "center", + }} /> #### Download Flow diff --git a/docs/docs/examples/conversation-chain.mdx b/docs/docs/examples/conversation-chain.mdx index 1cd59ca55..294d1b440 100644 --- a/docs/docs/examples/conversation-chain.mdx +++ b/docs/docs/examples/conversation-chain.mdx @@ -22,6 +22,13 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; light: "img/basic-chat.png", dark: "img/basic-chat.png", }} + +style={{ + width: "80%", + margin: "20px auto", + display: "flex", + justifyContent: "center", + }} /> #### Download Flow diff --git a/docs/docs/examples/csv-loader.mdx b/docs/docs/examples/csv-loader.mdx index 351e99440..25f3bb444 100644 --- a/docs/docs/examples/csv-loader.mdx +++ b/docs/docs/examples/csv-loader.mdx @@ -34,6 +34,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; light: "img/csv-loader.png", dark: "img/csv-loader.png", }} + 
style={{ + width: "80%", + margin: "20px auto", + display: "flex", + justifyContent: "center", + }} /> #### Download Flow diff --git a/docs/docs/examples/flow-runner.mdx b/docs/docs/examples/flow-runner.mdx index e20dc39f7..8a07adb0a 100644 --- a/docs/docs/examples/flow-runner.mdx +++ b/docs/docs/examples/flow-runner.mdx @@ -3,9 +3,6 @@ description: Custom Components hide_table_of_contents: true --- -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import Admonition from "@theme/Admonition"; - # FlowRunner Component The CustomComponent class allows us to create components that interact with Langflow itself. In this example, we will make a component that runs other flows available in "My Collection". @@ -18,7 +15,7 @@ The CustomComponent class allows us to create components that interact with Lang }} style={{ width: "30%", - margin: "0 auto", + margin: "20px auto", display: "flex", justifyContent: "center", }} @@ -35,7 +32,7 @@ We will cover how to: Example Code ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class FlowRunner(CustomComponent): @@ -75,7 +72,7 @@ class FlowRunner(CustomComponent): ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent class MyComponent(CustomComponent): @@ -95,7 +92,7 @@ The typical structure of a Custom Component is composed of _`display_name`_ and --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent # focus @@ -118,7 +115,7 @@ Let's start by defining our component's _`display_name`_ and _`description`_. 
--- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent # focus from langchain.schema import Document @@ -140,7 +137,7 @@ Second, we will import _`Document`_ from the [_langchain.schema_](https://docs.l --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent # focus from langchain.schema import Document @@ -167,7 +164,7 @@ Now, let's add the [parameters](focus://11[20:55]) and the [return type](focus:/ --- ```python focus=13:14 -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document @@ -189,7 +186,7 @@ We can now start writing the _`build`_ method. Let's list available flows in "My --- ```python focus=15:18 -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document @@ -222,7 +219,7 @@ And retrieve a flow that matches the selected name (we'll make a dropdown input --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document @@ -250,7 +247,7 @@ You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to cus --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document @@ -287,7 +284,7 @@ The content of a document can be extracted using the _`page_content`_ attribute, --- ```python focus=9:16 -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document @@ -366,3 +363,6 @@ Done! 
This is what our script and custom component looks like: /> + +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; diff --git a/docs/docs/examples/how-upload-examples.mdx b/docs/docs/examples/how-upload-examples.mdx deleted file mode 100644 index 4f54558eb..000000000 --- a/docs/docs/examples/how-upload-examples.mdx +++ /dev/null @@ -1,28 +0,0 @@ -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - -# 📚 How to Upload Examples? - -We welcome all examples that can help our community learn and explore Langflow's capabilities. -Langflow Examples is a repository on [GitHub](https://github.com/logspace-ai/langflow_examples) that contains examples of flows that people can use for inspiration and learning. - -{" "} - - -To upload examples, please follow these steps: - -1. **Create a Flow:** First, create a flow using Langflow. You can use any of the available templates or create a new flow from scratch. - -2. **Export the Flow:** Once you have created a flow, export it as a JSON file. Make sure to give your file a descriptive name and include a brief description of what it does. - -3. **Submit a Pull Request:** Finally, submit a pull request (PR) to the examples repo. Make sure to include your JSON file in the PR. - -If your example uses any third-party libraries or packages, please include them in your PR and make sure that your example follows the [**⛓️ Langflow Code Of Conduct**](https://github.com/logspace-ai/langflow/blob/dev/CODE_OF_CONDUCT.md). 
diff --git a/docs/docs/examples/midjourney-prompt-chain.mdx b/docs/docs/examples/midjourney-prompt-chain.mdx deleted file mode 100644 index 9df732026..000000000 --- a/docs/docs/examples/midjourney-prompt-chain.mdx +++ /dev/null @@ -1,46 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# MidJourney Prompt Chain - -The `MidJourneyPromptChain` can be used to generate imaginative and detailed MidJourney prompts. - -For example, type something like: - -```bash -Dragon -``` - -And get a response such as: - -```text -Imagine a mysterious forest, the trees are tall and ancient, their branches reaching up to the sky. Through the darkness, a dragon emerges from the shadows, its scales shimmering in the moonlight. Its wingspan is immense, and its eyes glow with a fierce intensity. It is a majestic and powerful creature, one that commands both respect and fear. -``` - - - Notice that the `ConversationSummaryMemory` stores a summary of the - conversation over time. Try using it to create better prompts as the - conversation goes on. - - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) -- [`ConversationSummaryMemory`](https://python.langchain.com/docs/modules/memory/types/summary) - - diff --git a/docs/docs/examples/multiple-vectorstores.mdx b/docs/docs/examples/multiple-vectorstores.mdx deleted file mode 100644 index 2e554bbf1..000000000 --- a/docs/docs/examples/multiple-vectorstores.mdx +++ /dev/null @@ -1,58 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Multiple Vector Stores - -The example below shows an agent operating with two vector stores built upon different data sources. 
- -The `TextLoader` loads a TXT file, while the `WebBaseLoader` pulls text from webpages into a document format to accessed downstream. The `Chroma` vector stores are created analogous to what we have demonstrated in our [CSV Loader](/examples/csv-loader.mdx) example. Finally, the `VectorStoreRouterAgent` constructs an agent that routes between the vector stores. - - - Get the TXT file used - [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt). - - -URL used by the `WebBaseLoader`: - -```text -https://pt.wikipedia.org/wiki/Harry_Potter -``` - - - When you build the flow, request information about one of the sources. The - agent should be able to use the correct source to generate a response. - - - - Learn more about Multiple Vector Stores - [here](https://python.langchain.com/docs/modules/data_connection/vectorstores/). - - -## ⛓️ Langflow Example - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - - - -#### Download Flow - - - -- [`WebBaseLoader`](https://python.langchain.com/docs/integrations/document_loaders/web_base) -- [`TextLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/) -- [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter) -- [`OpenAIEmbedding`](https://python.langchain.com/docs/integrations/text_embedding/openai) -- [`Chroma`](https://python.langchain.com/docs/integrations/vectorstores/chroma) -- [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/) -- [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) -- [`VectorStoreRouterToolkit`](https://js.langchain.com/docs/modules/agents/tools/how_to/agents_with_vectorstores) -- 
[`VectorStoreRouterAgent`](https://js.langchain.com/docs/modules/agents/tools/how_to/agents_with_vectorstores) - - diff --git a/docs/docs/examples/python-function.mdx b/docs/docs/examples/python-function.mdx index 9eadd7273..2bb4b93e1 100644 --- a/docs/docs/examples/python-function.mdx +++ b/docs/docs/examples/python-function.mdx @@ -43,6 +43,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; light: "img/python-function.png", dark: "img/python-function.png", }} + style={{ + width: "80%", + margin: "20px auto", + display: "flex", + justifyContent: "center", + }} /> #### Download Flow diff --git a/docs/docs/examples/serp-api-tool.mdx b/docs/docs/examples/serp-api-tool.mdx index 7e8d95936..175b6f1be 100644 --- a/docs/docs/examples/serp-api-tool.mdx +++ b/docs/docs/examples/serp-api-tool.mdx @@ -37,6 +37,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; light: "img/serp-api-tool.png", dark: "img/serp-api-tool.png", }} + style={{ + width: "80%", + margin: "20px auto", + display: "flex", + justifyContent: "center", + }} /> #### Download Flow diff --git a/src/backend/langflow/components/agents/__init__.py b/docs/docs/getting-started/basic-prompting.mdx similarity index 100% rename from src/backend/langflow/components/agents/__init__.py rename to docs/docs/getting-started/basic-prompting.mdx diff --git a/src/backend/langflow/components/chains/__init__.py b/docs/docs/getting-started/blog-writer.mdx similarity index 100% rename from src/backend/langflow/components/chains/__init__.py rename to docs/docs/getting-started/blog-writer.mdx diff --git a/docs/docs/getting-started/cli.mdx b/docs/docs/getting-started/cli.mdx new file mode 100644 index 000000000..050c4ab50 --- /dev/null +++ b/docs/docs/getting-started/cli.mdx @@ -0,0 +1,44 @@ +# 🖥️ Command Line Interface (CLI) + + +## Overview + +Langflow's Command Line Interface (CLI) is a powerful tool that allows you to interact with the Langflow server from the command line. 
The CLI provides a wide range of commands to help you shape Langflow to your needs. + +Running the CLI without any arguments will display a list of available commands and options. + +```bash +langflow --help +# or +langflow +``` + +Each option is detailed below: + +- `--help`: Displays all available options. +- `--host`: Defines the host to bind the server to. Can be set using the `LANGFLOW_HOST` environment variable. The default is `127.0.0.1`. +- `--workers`: Sets the number of worker processes. Can be set using the `LANGFLOW_WORKERS` environment variable. The default is `1`. +- `--timeout`: Sets the worker timeout in seconds. The default is `60`. +- `--port`: Sets the port to listen on. Can be set using the `LANGFLOW_PORT` environment variable. The default is `7860`. +- `--config`: Defines the path to the configuration file. The default is `config.yaml`. +- `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`. +- `--log-level`: Defines the logging level. Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`. +- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. +- `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`. +- `--cache`: Select the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`. +- `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`. +- `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable. 
+- `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`. +- `--remove-api-keys/--no-remove-api-keys`: Toggles the option to remove API keys from the projects saved in the database. Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`. +- `--install-completion [bash|zsh|fish|powershell|pwsh]`: Installs completion for the specified shell. +- `--show-completion [bash|zsh|fish|powershell|pwsh]`: Shows completion for the specified shell, allowing you to copy it or customize the installation. +- `--backend-only`: This parameter, with a default value of `False`, allows running only the backend server without the frontend. It can also be set using the `LANGFLOW_BACKEND_ONLY` environment variable. +- `--store`: This parameter, with a default value of `True`, enables the store features; use `--no-store` to deactivate it. It can be configured using the `LANGFLOW_STORE` environment variable. + +These parameters are important for users who need to customize the behavior of Langflow, especially in development or specialized deployment scenarios. + +### Environment Variables + +You can configure many of the CLI options using environment variables. These can be exported in your operating system or added to a `.env` file and loaded using the `--env-file` option. + +A sample `.env` file named `.env.example` is included with the project. Copy this file to a new file named `.env` and replace the example values with your actual settings. If you're setting values in both your OS and the `.env` file, the `.env` settings will take precedence. 
diff --git a/docs/docs/getting-started/creating-flows.mdx b/docs/docs/getting-started/creating-flows.mdx deleted file mode 100644 index aecc3ea16..000000000 --- a/docs/docs/getting-started/creating-flows.mdx +++ /dev/null @@ -1,38 +0,0 @@ -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import ReactPlayer from "react-player"; - -# 🎨 Creating Flows - -## Compose - -Creating flows with Langflow is easy. Drag sidebar components onto the canvas and connect them together to create your pipeline. Langflow provides a range of [LangChain components](https://python.langchain.com/docs/modules/) to choose from, including LLMs, prompt serializers, agents, and chains. - - - -## Fork - -The easiest way to start with Langflow is by forking a **community example**. Forking an example stores a copy in your project collection, allowing you to edit and save the modified version as a new flow. - -
- -
- -## Build - -Building a flow means validating if the components have prerequisites fulfilled and are properly instantiated. When a chat message is sent, the flow will run for the first time, executing the pipeline. - -
- -
diff --git a/src/backend/langflow/components/custom_components/__init__.py b/docs/docs/getting-started/document-qa.mdx similarity index 100% rename from src/backend/langflow/components/custom_components/__init__.py rename to docs/docs/getting-started/document-qa.mdx diff --git a/docs/docs/getting-started/hugging-face-spaces.mdx b/docs/docs/getting-started/hugging-face-spaces.mdx deleted file mode 100644 index 4759ea398..000000000 --- a/docs/docs/getting-started/hugging-face-spaces.mdx +++ /dev/null @@ -1,20 +0,0 @@ -# 🤗 HuggingFace Spaces - -A fully featured version of Langflow can be accessed via HuggingFace spaces with no installation required. - -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; - -{" "} - - - -Check out Langflow on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow). diff --git a/docs/docs/getting-started/installation.md b/docs/docs/getting-started/installation.md deleted file mode 100644 index c3ad54239..000000000 --- a/docs/docs/getting-started/installation.md +++ /dev/null @@ -1,15 +0,0 @@ -# 📦 How to install? 
- -## Installation - -You can install Langflow from pip: - -```bash -pip install langflow -``` - -Next, run: - -```bash -langflow -``` \ No newline at end of file diff --git a/src/backend/langflow/components/documentloaders/__init__.py b/docs/docs/getting-started/memory-chatbot.mdx similarity index 100% rename from src/backend/langflow/components/documentloaders/__init__.py rename to docs/docs/getting-started/memory-chatbot.mdx diff --git a/docs/docs/getting-started/rag-with-astradb.mdx b/docs/docs/getting-started/rag-with-astradb.mdx new file mode 100644 index 000000000..01daa7b6f --- /dev/null +++ b/docs/docs/getting-started/rag-with-astradb.mdx @@ -0,0 +1,195 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; + +# 🌟 RAG with Astra DB + +This guide will walk you through how to build a RAG (Retrieval Augmented Generation) application using **Astra DB** and **Langflow**. + +[Astra DB](https://www.datastax.com/products/datastax-astra?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=astradb) is a cloud-native database built on Apache Cassandra that is optimized for the cloud. It is a fully managed database-as-a-service that simplifies operations and reduces costs. Astra DB is built on the same technology that powers the largest Cassandra deployments in the world. + +In this guide, we will use Astra DB as a vector store to store and retrieve the documents that will be used by the RAG application to generate responses. + + + This guide assumes that you have Langflow up and running. If you are new to + Langflow, you can check out the [Getting Started](/) guide. 
+ + +TLDR; + +- [Create a free Astra DB account](https://astra.datastax.com/signup?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=create-a-free-astra-db-account) +- Duplicate our [Langflow 1.0 Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) +- Create a new database, get a **Token** and the **API Endpoint** +- Click on the **New Project** button and look for Vector Store RAG. This will create a new project with the necessary components +- Import the project into Langflow by dropping it on the Canvas or My Collection page +- Update the **Token** and **API Endpoint** in the **Astra DB** components +- Update the OpenAI API key in the **OpenAI** components +- Run the ingestion flow which is the one that uses the **Astra DB** component +- Click on the ⚡ _Run_ button and start interacting with your RAG application + +# First things first + +## Create an Astra DB Database + +To get started, you will need to [create an Astra DB database](https://astra.datastax.com/signup?utm_source=langflow-pre-release&utm_medium=referral&utm_campaign=langflow-announcement&utm_content=create-an-astradb-database). + +Once you have created an account, you will be taken to the Astra DB dashboard. Click on the **Create Database** button. + + + +Now you will need to configure your database. Choose the **Serverless (Vector)** deployment type, and pick a Database name, provider and region. + +After you have configured your database, click on the **Create Database** button. + + + +Once your database is initialized, to the right of the page, you will see the _Database Details_ section which contains a button for you to copy the **API Endpoint** and another to generate a **Token**. + + + +Now we are all set to start building our RAG application using Astra DB and Langflow. + +## (Optional) Duplicate the Langflow 1.0 HuggingFace Space + +If you haven't already, now is the time to launch Langflow. 
To make things easier, you can duplicate our [Langflow 1.0 Space](https://huggingface.co/spaces/Langflow/Langflow-Preview?duplicate=true) which sets up a Langflow instance just for you. + +## Open the Vector Store RAG Project + +To get started, click on the **New Project** button and look for the **Vector Store RAG** project. This will open a starter project with the necessary components to run a RAG application using Astra DB. + + + +This project consists of two flows. The simpler one is the **Ingestion Flow** which is responsible for ingesting the documents into the Astra DB database. + +Your first step should be to understand what each flow does and how they interact with each other. + +The ingestion flow consists of: + +- **Files** component that uploads a text file to Langflow +- **Recursive Character Text Splitter** component that splits the text into smaller chunks +- **OpenAIEmbeddings** component that generates embeddings for the text chunks +- **Astra DB** component that stores the text chunks in the Astra DB database + + + +Now, let's update the **Astra DB** and **Astra DB Search** components with the **Token** and **API Endpoint** that we generated earlier, and the OpenAI Embeddings components with your OpenAI API key. + + + +And run it! This will ingest the Text data from your file into the Astra DB database. + + + +Now, on to the **RAG Flow**. This flow is responsible for generating responses to your queries. It will define all of the steps from getting the User's input to generating a response and displaying it in the Interaction Panel. + +The RAG flow is a bit more complex. 
It consists of: + +- **Chat Input** component that defines where to put the user input coming from the Interaction Panel +- **OpenAI Embeddings** component that generates embeddings from the user input +- **Astra DB Search** component that retrieves the most relevant Records from the Astra DB database +- **Text Output** component that turns the Records into Text by concatenating them and also displays it in the Interaction Panel + - One interesting point you'll see here is that this component is named `Extracted Chunks`, and that is how it will appear in the Interaction Panel +- **Prompt** component that takes in the user input and the retrieved Records as text and builds a prompt for the OpenAI model +- **OpenAI** component that generates a response to the prompt +- **Chat Output** component that displays the response in the Interaction Panel + + + +To run it all we have to do is click on the ⚡ _Run_ button and start interacting with your RAG application. + + + +This opens the Interaction Panel where you can chat with your data. + +Because this flow has a **Chat Input** and a **Text Output** component, the Panel displays a chat input at the bottom and the Extracted Chunks section on the left. + + + +Once we interact with it we get a response and the Extracted Chunks section is updated with the retrieved records. + + + +And that's it! You have successfully run a RAG application using Astra DB and Langflow. + +# Conclusion + +In this guide, we have learned how to run a RAG application using Astra DB and Langflow. +We have seen how to create an Astra DB database, import the Astra DB RAG Flows project into Langflow, and run the ingestion and RAG flows. 
diff --git a/docs/docs/guidelines/api.mdx b/docs/docs/guidelines/api.mdx index 8bba633fb..25dbeb31e 100644 --- a/docs/docs/guidelines/api.mdx +++ b/docs/docs/guidelines/api.mdx @@ -1,5 +1,6 @@ import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; # API Keys @@ -7,12 +8,17 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; Langflow offers an API Key functionality that allows users to access their individual components and flows without going through traditional login authentication. The API Key is a user-specific token that can be included in the request's header or query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API Keys in Langflow. + + This feature requires the `LANGFLOW_AUTO_LOGIN` environment variable to be set + to `False`. The default user and password are set using _`LANGFLOW_SUPERUSER`_ + and _`LANGFLOW_SUPERUSER_PASSWORD`_ environment variables. Default values are + _`langflow`_ and _`langflow`_ respectively. + + ## Generating an API Key ### Through Langflow UI -{/* add image img/api-key.png */} - \ + http://localhost:3000/api/v1/run/ \ -H 'Content-Type: application/json'\ -H 'x-api-key: '\ -d '{"inputs": {"text":""}, "tweaks": {}}' diff --git a/docs/docs/guidelines/async-api.mdx b/docs/docs/guidelines/async-api.mdx deleted file mode 100644 index c5473812e..000000000 --- a/docs/docs/guidelines/async-api.mdx +++ /dev/null @@ -1,73 +0,0 @@ -import Admonition from "@theme/Admonition"; - -# Asynchronous Processing - -## Introduction - -Starting from version 0.5, Langflow introduces a new feature to its API: the _`sync`_ flag. This flag allows users to opt for asynchronous processing of their flows, freeing up resources and enabling better control over long-running tasks. -This feature supports running tasks in a Celery worker queue and AnyIO task groups for now. 
- - - This is an experimental feature. The default behavior of the API is still - synchronous processing. The API may change in the future. - - -## The _`sync`_ Flag - -The _`sync`_ flag can be included in the payload of your POST request to the _`/api/v1/process/`_ endpoint. -When set to _`false`_, the API will initiate an asynchronous task instead of processing the flow synchronously. - -### API Request with _`sync`_ flag - -```bash -curl -X POST \ - http://localhost:3000/api/v1/process/ \ - -H 'Content-Type: application/json' \ - -H 'x-api-key: ' \ - -d '{"inputs": {"text": ""}, "tweaks": {}, "sync": false}' -``` - -Response: - -```json -{ - "result": { - "output": "..." - }, - "task": { - "id": "...", - "href": "api/v1/task/" - }, - "session_id": "...", - "backend": "..." // celery or anyio -} -``` - -## Checking Task Status - -You can check the status of an asynchronous task by making a GET request to the `/task/{task_id}` endpoint. - -```bash -curl -X GET \ - http://localhost:3000/api/v1/task/ \ - -H 'x-api-key: ' -``` - -### Response - -The endpoint will return the current status of the task and, if completed, the result of the task. Possible statuses include: - -- _`PENDING`_: The task is waiting for execution. -- _`SUCCESS`_: The task has completed successfully. -- _`FAILURE`_: The task has failed. - -Example response for a completed task: - -```json -{ - "status": "SUCCESS", - "result": { - "output": "..." - } -} -``` diff --git a/docs/docs/guidelines/components.mdx b/docs/docs/guidelines/components.mdx index 32ec00615..16aa83eff 100644 --- a/docs/docs/guidelines/components.mdx +++ b/docs/docs/guidelines/components.mdx @@ -26,13 +26,14 @@ Components are the building blocks of the flows. They are made of inputs, output {" "} +
diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx index 99106d400..daf47987f 100644 --- a/docs/docs/guidelines/custom-component.mdx +++ b/docs/docs/guidelines/custom-component.mdx @@ -30,7 +30,7 @@ Here is an example: ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class DocumentProcessor(CustomComponent): @@ -92,7 +92,7 @@ The Python script for every Custom Component should follow a set of rules. Let's The script must contain a **single class** that inherits from _`CustomComponent`_. ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class MyComponent(CustomComponent): @@ -113,7 +113,7 @@ class MyComponent(CustomComponent): This class requires a _`build`_ method used to run the component and define its fields. ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class MyComponent(CustomComponent): @@ -134,7 +134,7 @@ class MyComponent(CustomComponent): The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom). 
```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class MyComponent(CustomComponent): @@ -153,7 +153,7 @@ class MyComponent(CustomComponent): --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class MyComponent(CustomComponent): @@ -179,7 +179,7 @@ Check out the [component reference](../components/custom) for more details on th --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class MyComponent(CustomComponent): @@ -204,7 +204,7 @@ Let's create a custom component that processes a document (_`langchain.schema.Do To start, let's choose a name for our component by adding a _`display_name`_ attribute. This name will appear on the canvas. The name of the class is not relevant, but let's call it _`DocumentProcessor`_. ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document # focus @@ -227,7 +227,7 @@ class DocumentProcessor(CustomComponent): We can also write a description for it using a _`description`_ attribute. ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class DocumentProcessor(CustomComponent): @@ -244,7 +244,7 @@ class DocumentProcessor(CustomComponent): --- ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class DocumentProcessor(CustomComponent): @@ -283,11 +283,11 @@ The return type is _`Document`_. The _`build_config`_ method is here defined to customize the component fields. - _`options`_ determines that the field will be a dropdown menu. The list values and field type must be _`str`_. -- _`value`_ is the default option of the dropdown menu. 
+- _`value`_ is the default value of the field. - _`display_name`_ is the name of the field to be displayed. ```python -from langflow import CustomComponent +from langflow.custom import CustomComponent from langchain.schema import Document class DocumentProcessor(CustomComponent): diff --git a/docs/docs/guidelines/features.mdx b/docs/docs/guidelines/features.mdx index 19837430d..932607c30 100644 --- a/docs/docs/guidelines/features.mdx +++ b/docs/docs/guidelines/features.mdx @@ -1,9 +1,3 @@ -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import ReactPlayer from "react-player"; -import Admonition from "@theme/Admonition"; - # Features
@@ -14,13 +8,14 @@ import Admonition from "@theme/Admonition";
{" "} +
@@ -46,14 +41,12 @@ The Code button shows snippets to use your flow as a Python object or an API. **Python Code** -Through the Langflow package, you can load a flow from a JSON file and use it as a LangChain object. +Through the Langflow package, you can run your flow from a JSON file. The example below shows how to run a flow from a JSON file. -```py -from langflow import load_flow_from_json +```python +from langflow.load import run_flow_from_json -flow = load_flow_from_json("path/to/flow.json") -# Now you can use it like any chain -flow("Hey, have you heard of Langflow?") +results = run_flow_from_json("path/to/flow.json", input_value="Hello, World!") ``` **API** @@ -67,3 +60,9 @@ The example below shows a Python script making a POST request to a local API end >
+ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; diff --git a/docs/docs/guidelines/login.mdx b/docs/docs/guidelines/login.mdx index fde7cd09a..1d5a1d031 100644 --- a/docs/docs/guidelines/login.mdx +++ b/docs/docs/guidelines/login.mdx @@ -105,7 +105,7 @@ Users can change their profile settings by clicking on the profile icon in the t light: useBaseUrl("img/my-account.png"), dark: useBaseUrl("img/my-account.png"), }} - style={{ width: "50%", maxWidth: "600px", margin: "0 auto" }} + style={{ width: "50%", maxWidth: "600px", margin: "20px auto" }} /> By clicking on **Profile Settings**, the user is taken to the profile settings page, where they can change their password and their profile picture. @@ -116,10 +116,11 @@ By clicking on **Profile Settings**, the user is taken to the profile settings p light: useBaseUrl("img/profile-settings.png"), dark: useBaseUrl("img/profile-settings.png"), }} - style={{ maxWidth: "600px", margin: "0 auto" }} + style={{ maxWidth: "600px", margin: "20px auto" }} /> -By clicking on **Admin Page**, the superuser is taken to the admin page, where they can manage users and groups. +By clicking on **Admin Page**, the superuser is taken to the admin page, where they +can manage users and groups. - This implementation is still in development. Contributions are welcome! - - -The Async API is an implementation of the Langflow API that uses [Celery](https://docs.celeryproject.org/en/stable/) -to run the tasks asynchronously, using a message broker to send and receive messages, a result backend to store the results and a cache to store the task states and session data. 
- -### Configuration - -The folder _`./deploy`_ in the [Github repository](https://github.com/logspace-ai/langflow) contains a _`.env.example`_ file that can be used to configure a Langflow deployment. -The file contains the variables required to configure a Celery worker queue, Redis cache and result backend and a RabbitMQ message broker. - -To set it up locally you can copy the file to _`.env`_ and run the following command: - -```bash -docker compose up -d -``` - -This will set up the following containers: - -- Langflow API -- Celery worker -- RabbitMQ message broker -- Redis cache -- PostgreSQL database -- PGAdmin -- Flower -- Traefik -- Grafana -- Prometheus - -### Testing - -To run the tests for the Async API, you can run the following command: - -```bash -docker compose -f docker-compose.with_tests.yml up --exit-code-from tests tests result_backend broker celeryworker db --build -``` diff --git a/docs/docs/guides/superuser.mdx b/docs/docs/guides/superuser.mdx deleted file mode 100644 index 04e0f96af..000000000 --- a/docs/docs/guides/superuser.mdx +++ /dev/null @@ -1,7 +0,0 @@ -import ThemedImage from "@theme/ThemedImage"; -import useBaseUrl from "@docusaurus/useBaseUrl"; -import ZoomableImage from "/src/theme/ZoomableImage.js"; -import ReactPlayer from "react-player"; - -Now, we need to explain what are the permissions the superuser gets. Once logged in, they can activate new users, -edit them, diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx index 840f10f10..9357a8fdf 100644 --- a/docs/docs/index.mdx +++ b/docs/docs/index.mdx @@ -1,11 +1,13 @@ -# 👋 Welcome to Langflow - -Langflow is an easy way to create flows. The drag-and-drop feature allows quick and effortless experimentation, while the built-in chat interface facilitates real-time interaction. It provides options to edit prompt parameters, create chains and agents, track thought processes, and export flows. 
- import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; +# 👋 Welcome to Langflow + +Langflow is an easy way to build from simple to complex AI applications. It is a low-code platform that allows you to integrate AI into everything you do. + +{" "} + {" "} + +## 🚀 First steps + +## Installation + +Make sure you have **Python 3.10** installed on your system. + +You can install **Langflow** with [pipx](https://pipx.pypa.io/stable/installation/) or with pip. + +Pipx can fetch the missing Python version for you, but you can also install it manually. + +```bash +pip install langflow -U +# or +pipx install langflow --python python3.10 --fetch-missing-python +``` + +Or you can install a pre-release version using: + +```bash +pip install langflow --pre --force-reinstall +# or +pipx install langflow --python python3.10 --fetch-missing-python --pip-args="--pre --force-reinstall" +``` + +We recommend using --force-reinstall to ensure you have the latest version of Langflow and its dependencies. + +### ⛓️ Running Langflow + +Langflow can be run in a variety of ways, including using the command-line interface (CLI) or HuggingFace Spaces. + +```bash +langflow run # or langflow --help +``` + +#### 🤗 HuggingFace Spaces + +Hugging Face provides a great alternative for running Langflow in their Spaces environment. This means you can run Langflow without any local installation required. + +The first step is to go to the [Langflow Space](https://huggingface.co/spaces/Logspace/Langflow?duplicate=true). + +Remember to use a Chromium-based browser for the best experience. You'll be presented with the following screen: + + + +From here, just name your Space, define the visibility (Public or Private), and click on `Duplicate Space` to start the installation process. When that is done, you'll be redirected to the Space's main page to start using Langflow right away! 
+ +Once you get Langflow running, click on New Project in the top right corner of the screen. Langflow provides a range of example flows to help you get started. + +To quickly try one of them, open a starter example, set up your API keys and click ⚡ Run, on the bottom right corner of the canvas. This will open up Langflow's Interaction Panel with the chat console, text inputs, and outputs. + +### 🖥️ Command Line Interface (CLI) + +Langflow provides a command-line interface (CLI) for easy management and configuration. + +#### Usage + +You can run the Langflow using the following command: + +```bash +langflow run [OPTIONS] +``` + +Find more information about the available options by running: + +```bash +langflow --help +``` diff --git a/src/backend/langflow/components/embeddings/__init__.py b/docs/docs/migration/api.mdx similarity index 100% rename from src/backend/langflow/components/embeddings/__init__.py rename to docs/docs/migration/api.mdx diff --git a/docs/docs/migration/compatibility.mdx b/docs/docs/migration/compatibility.mdx new file mode 100644 index 000000000..8223bcceb --- /dev/null +++ b/docs/docs/migration/compatibility.mdx @@ -0,0 +1,44 @@ +import Admonition from '@theme/Admonition'; + +# Compatibility with Previous Versions + + +## TLDR; + +- You'll need to add a few components to your flow to make it compatible with the new version of Langflow. +- Add a Runnable Executor, connect it to the last component (a Chain or an Agent) in your flow, and connect a Chat Input and a Chat Output to the Runnable Executor. This should work *most of the time*. +- You might also need to update the Chain or Agent component to the latest version. +- Most Components will work as they are, but you'll need to add an Input and an Output to your flow. +- You can use the Runnable Executor to run a LangChain runnable (which is the output of many components before 1.0) +- We need your feedback on this, so please let us know how it goes and what you think. 
+ +## Introduction + +Langflow now works best with a flow that has an Input and an Output and that is mostly what you'll need to add to your existing flows. + +Hopefully, you'll find that even though you still can work with your current flows, updating all your components to the new version of Langflow will be worth it. + +We've tried to make it as easy as possible for you to adapt your existing flows to work seamlessly in the new version of Langflow. + +## How to Adapt Your Existing Flows + + +The steps to take are few but not always simple. Here's how you can adapt your existing flows to work seamlessly in the new version of Langflow: + + +

**Caution:**

+

While this should work most of the time, it might not work for all flows. You might need to update the Chain or Agent component to the latest version. Please let us know if you encounter any issues.

+
+ +1. **Check if your flow ends with a Chain or Agent component**. + - If it does not, it *should* work as it is because it probably was not a chat flow. +2. **Add a Runnable Executor**. + - Add a Runnable Executor to the end of your flow. + - Connect the last component (a Chain or an Agent) in your flow to the Runnable Executor. +3. **Add a Chat Input and a Chat Output**. + - Add a Chat Input and a Chat Output to your flow. + - Connect the Chat Input to the Runnable Executor. + - Connect the Chat Output to the Runnable Executor. + +{/* Add picture of the flow */} + diff --git a/src/backend/langflow/components/llms/__init__.py b/docs/docs/migration/component-status-and-data-passing.mdx similarity index 100% rename from src/backend/langflow/components/llms/__init__.py rename to docs/docs/migration/component-status-and-data-passing.mdx diff --git a/src/backend/langflow/components/retrievers/__init__.py b/docs/docs/migration/connecting-output-components.mdx similarity index 100% rename from src/backend/langflow/components/retrievers/__init__.py rename to docs/docs/migration/connecting-output-components.mdx diff --git a/src/backend/langflow/components/textsplitters/__init__.py b/docs/docs/migration/custom-component.mdx similarity index 100% rename from src/backend/langflow/components/textsplitters/__init__.py rename to docs/docs/migration/custom-component.mdx diff --git a/src/backend/langflow/components/toolkits/__init__.py b/docs/docs/migration/experimental-components.mdx similarity index 100% rename from src/backend/langflow/components/toolkits/__init__.py rename to docs/docs/migration/experimental-components.mdx diff --git a/src/backend/langflow/components/tools/__init__.py b/docs/docs/migration/flow-of-data.mdx similarity index 100% rename from src/backend/langflow/components/tools/__init__.py rename to docs/docs/migration/flow-of-data.mdx diff --git a/docs/docs/migration/global-variables.mdx b/docs/docs/migration/global-variables.mdx new file mode 100644 index 
000000000..ce6d15a5f --- /dev/null +++ b/docs/docs/migration/global-variables.mdx @@ -0,0 +1,65 @@ +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; + +# Global Variables + +Global Variables are a really useful feature of Langflow. +They allow you to define reusable variables that can be accessed from any Text field in your project. + +The first thing you need to do is find a **Text field** in a Component, so let's talk about what a Text field is. + +## Text Fields + +Text fields are the fields in a Component where you can write text but that does not allow you to open a Text Area. + +The easiest way to find fields that are Text fields, though, is to look for fields that have a 🌐 button. + + + +## Creating a Global Variable + +To create a Global Variable, you need to click on the 🌐 button in a Text field and that will open a dropdown showing your currently available variables and at the end of it **+ Add New Variable**. + + + +Click on **+ Add New Variable** and a window will open where you can define your new Global Variable. + +In it, you can define the **Name** of the variable, the optional **Type** of the variable, and the **Value** of the variable. + +The **Name** is the name that you will use to refer to the variable in your Text fields. + +The **Type** is optional for now but will be used in the future to allow for more advanced features. + +The **Value** is the value that the variable will have. +{/* say that all variables are encrypted */} + + + All Global Variables are encrypted and cannot be accessed by anyone but you. + + + + +After you have defined your variable, click on **Save Variable** and your variable will be created. + +After that, once you click on the 🌐 button in a Text field, you will see your new variable in the dropdown. 
diff --git a/docs/docs/migration/inputs-and-outputs.mdx b/docs/docs/migration/inputs-and-outputs.mdx new file mode 100644 index 000000000..5db3f3af2 --- /dev/null +++ b/docs/docs/migration/inputs-and-outputs.mdx @@ -0,0 +1,36 @@ +# Inputs and Outputs + +TL;DR: Inputs and Outputs are a category of components that are used to define where data comes in and out of your flow. They also +dynamically change the Interaction Panel and can be renamed to make it easier to build and maintain your flows. + +## Introduction + +Langflow 1.0 introduces new categories of components called Inputs and Outputs. They are used to make it easier to understand and interact with your flows. + +Let's start with what they have in common: + +- Components in these categories connect to components that have Text or Record inputs or outputs. Some can connect to both but you have to pick what type of data you want to output or input. +- They can be renamed to help you identify them more easily in the Interaction Panel and while using the API. +- They dynamically change the Interaction Panel to make it easier to understand and interact with your flows. + +Native Langflow Components were created to be powerful tools that work around Langflow's features. They are designed to be easy to use and understand, and to help you build your flows faster. + +Let's dive into Inputs and Outputs. + +## Inputs + +Inputs are components that are used to define where data comes into your flow. They can be used to receive data from the user, from a database, or from any other source that can be converted to Text or Record. + +The difference between Chat Input and other Input components is the format of the output, the number of configurable fields, and the way they are displayed in the Interaction Panel. + +Chat Input components can output Text or Record. 
When you want to pass the sender name, or sender to the next component, you can use the Record output, and when you want to pass the message only you can use the Text output. This is useful when saving the message to a database or a memory system like Zep. + +You can find out more about it and the other Inputs [here](../components/inputs). + +## Outputs + +Outputs are components that are used to define where data comes out of your flow. They can be used to send data to the user, to the Interaction Panel, or to define how the data will be displayed in the Interaction Panel. + +The Chat Output works similarly to the Chat Input but does not have a field that allows for written input. It is used as an Output definition and can be used to send data to the user. + +You can find out more about it and the other Outputs [here](../components/outputs). diff --git a/src/backend/langflow/components/vectorstores/__init__.py b/docs/docs/migration/multiple-flows.mdx similarity index 100% rename from src/backend/langflow/components/vectorstores/__init__.py rename to docs/docs/migration/multiple-flows.mdx diff --git a/src/backend/langflow/core/__init__.py b/docs/docs/migration/new-categories-and-components.mdx similarity index 100% rename from src/backend/langflow/core/__init__.py rename to docs/docs/migration/new-categories-and-components.mdx diff --git a/src/backend/langflow/custom/__init__.py b/docs/docs/migration/passing-tweaks-and-inputs.mdx similarity index 100% rename from src/backend/langflow/custom/__init__.py rename to docs/docs/migration/passing-tweaks-and-inputs.mdx diff --git a/src/backend/langflow/graph/edge/__init__.py b/docs/docs/migration/renaming-and-editing-components.mdx similarity index 100% rename from src/backend/langflow/graph/edge/__init__.py rename to docs/docs/migration/renaming-and-editing-components.mdx diff --git a/src/backend/langflow/graph/graph/__init__.py b/docs/docs/migration/sidebar-and-interaction-panel.mdx similarity index 100% rename from 
src/backend/langflow/graph/graph/__init__.py rename to docs/docs/migration/sidebar-and-interaction-panel.mdx diff --git a/src/backend/langflow/graph/vertex/__init__.py b/docs/docs/migration/state-management.mdx similarity index 100% rename from src/backend/langflow/graph/vertex/__init__.py rename to docs/docs/migration/state-management.mdx diff --git a/src/backend/langflow/interface/__init__.py b/docs/docs/migration/supported-frameworks.mdx similarity index 100% rename from src/backend/langflow/interface/__init__.py rename to docs/docs/migration/supported-frameworks.mdx diff --git a/docs/docs/migration/text-and-record.mdx b/docs/docs/migration/text-and-record.mdx new file mode 100644 index 000000000..cdfb26b6c --- /dev/null +++ b/docs/docs/migration/text-and-record.mdx @@ -0,0 +1,45 @@ +# Text and Record + +In Langflow 1.0 we added two main input and output types: Text and Record. Text is a simple string input and output type, while Record is a structure very similar to a dictionary in Python. It is a key-value pair data structure. + +We've created a few components to help you work with these types. Let's see how a few of them work. + +### Records To Text + +This is a Component that takes in Records and outputs a Text. It does this using a template string and concatenating the values of the Record, one per line. + +If we have the following Records: + +```json +{ + "sender_name": "Alice", + "message": "Hello!" +} +{ + "sender_name": "John", + "message": "Hi!" +} +``` + +And the template string is: _`{sender_name}: {message}`_ + +``` +Alice: Hello! +John: Hi! +``` + +### Create Record + +This Component allows you to create a Record from a number of inputs. You can add as many key-value pairs as you want (as long as it is less than 15 😅). Once you've picked that number you'll need to write the name of the Key and can pass Text values from other components to it. 
+ +### Documents To Records + +This Component takes in a [LangChain](https://langchain.com) Document and outputs a Record. It does this by extracting the _`page_content`_ and the _`metadata`_ from the Document and adding them to the Record as _`text`_ and _`data`_ respectively. + +## Why is this useful? + +The idea was to create a unified way to work with complex data in Langflow, and to make it easier to work with data that is not just a simple string. This way you can create more complex workflows and use the data in more ways. + +## What's next? + +We are planning to integrate an array of modalities to Langflow, such as images, audio, and video. This will allow you to create even more complex workflows and use cases. Stay tuned for more updates! 🚀 diff --git a/docs/docs/whats-new/a-new-chapter-langflow.mdx b/docs/docs/whats-new/a-new-chapter-langflow.mdx new file mode 100644 index 000000000..b312dc198 --- /dev/null +++ b/docs/docs/whats-new/a-new-chapter-langflow.mdx @@ -0,0 +1,96 @@ +# A new chapter for Langflow + +# First things first + +Thank you all for being part of the Langflow community. The journey so far has been amazing and we are happy to have you with us. + +We have some exciting news to share with you. Langflow is changing, and we want to tell you all about it. + +## Where have we been? + +We spent the last few months working on a new version of Langflow. We wanted to make it more powerful, more flexible, and easier to use. +We're moving from version 0.6 straight to 1.0 (preview). This is a big change, and we want to explain why we're doing it and what it means for you. + +## Why? + +In the past year, we learned a lot from the community and our users. We saw the potential of Langflow and the need for a more powerful and flexible tool for building conversational AI applications (and beyond). +We realized that Langflow was hiding things from you that would really help you build better and more complex conversational AI applications. 
So we decided to make a big change. + +## The only way to go is forward + +From all the people we talked to, we learned that the most important thing for (most of) them is to have a tool that is easy to use, but also powerful and controllable. They also told us that Langflow's transparency could be improved. + +In those points, we saw an opportunity to make Langflow much more powerful and flexible, while also making it easier to use and understand. + +One key change you'll notice is that projects now require you to define Inputs and Outputs. +This is a big change, but it's also a big improvement. +It allows you to define the structure of your conversation and the data that flows through it. +This makes it easier to understand and control your conversation. + +This change comes with a new way of visualizing your projects. Before 1.0 you would connect Components to ultimately build one final Component that was processed behind the scenes. +Now, each step of the process is defined by you, is visible on the canvas, and can be monitored and controlled by you. This makes it so that Composition is now just another way of building in Langflow. **Now data flows through your project more transparently**. + +The caveat is that existing projects may need some new Components to get them back to their full functionality. +[We've made this as easy as possible](../migration/compatibility), and there will be improvements to it as we get feedback in our Discord server and on GitHub. + +## Custom Interactions + +The moment we decided to make this change, we saw the potential to make Langflow even more yours. +By having a clear definition of Inputs and Outputs, we could build the experience around that, which led us to create the **Interaction Panel**. + +When building a project, testing and debugging are crucial. The Interaction Panel is a tool that changes dynamically based on the Inputs and Outputs you defined in your project.
+ +For example, let's say you are building a simple RAG application. Generally, you have an Input, some references that come from a Vector Store Search, a Prompt and the answer. +Now, you could plug the output of your Prompt into a [Text Output](../components/outputs#Text-Output), rename that to "Prompt Result" and see the output of your Prompt in the Interaction Panel. + +{/* Add image here of the described above */} + +This is just one example of how the Interaction Panel can help you build and debug your projects. + +We have many planned features for the Interaction Panel, and we're excited to see how you use it and what you think of it. + +## An easier start + +The experience for the first-time user is also something we wanted to improve. + +Meet the new and improved **New Project** screen. It's now easier to start a new project, and you can choose from a list of starter projects to get you started. + +{/* Add new project image */} + +We wanted to create starter projects that would help you learn about new features and also give you a head start on your projects. + +For now, we have: + +- **[Basic Prompting (Hello, world!)](/getting-started/basic-prompting)**: A simple flow that shows you how to use the Prompt Component and how to talk like a pirate. +- **[Vector Store RAG](/getting-started/rag-with-astradb)**: A flow that shows you how to ingest data into a Vector Store and then use it to run a RAG application. +- **[Memory Chatbot](/getting-started/memory-chatbot)**: This one shows you how to create a simple chatbot that can remember things about the user. +- **[Document QA](/getting-started/document-qa)**: This flow shows you how to build a simple flow that helps you get answers about a document. +- **[Blog Writer](/getting-started/blog-writer)**: Shows you how you can expand on the Prompt variables and be creative about what inputs you add to it.
+ +As always, your feedback is invaluable, so please let us know what you think of the new starter projects and what you would like to see in the future. + +## Less is more + +We added many new Components to Langflow and updated some of the existing ones, and we will deprecate some of them. + +The idea is that Langflow has evolved, and we want to make sure that the Components you use are the best they can be. +Some of them don't work well with the others, and some of them are just not needed anymore. + +We are working on a list of Components that will be deprecated. +In the preview stages of 1.0, we will have a smaller list of Components so that we make sure that the ones we have are the best they can be. +Regardless, community feedback is very important in this matter, so please let us know what you think of the new Components and which ones you miss. + +We are aiming at having a more stable and reliable set of Components that helps you get quickly to useful results. +This also means that your contributions in the [Langflow Store](https://langflow.store) and throughout the community are more important than ever. + +## What's next? + +Langflow went through a big change, and we are excited to see how you use it and what you think of it. + +We plan to add more types of Input and Output like Image and Audio, and we also plan to add more Components to help you build more complex projects. + +We also have some experimental features like a State Management System (so cool!) and a new way of building Grouped Components that we are excited to show you. + +## Reach out + +One last time, we want to thank you for being part of the Langflow community. Your feedback is invaluable, and we want to hear from you. 
diff --git a/docs/docs/whats-new/customization-control.mdx b/docs/docs/whats-new/customization-control.mdx new file mode 100644 index 000000000..11f23f53c --- /dev/null +++ b/docs/docs/whats-new/customization-control.mdx @@ -0,0 +1 @@ +# A New Customization and Control \ No newline at end of file diff --git a/docs/docs/whats-new/debugging-reimagined.mdx b/docs/docs/whats-new/debugging-reimagined.mdx new file mode 100644 index 000000000..d30234088 --- /dev/null +++ b/docs/docs/whats-new/debugging-reimagined.mdx @@ -0,0 +1 @@ +# Debugging Reimagined \ No newline at end of file diff --git a/docs/docs/whats-new/migrating-to-one-point-zero.mdx b/docs/docs/whats-new/migrating-to-one-point-zero.mdx new file mode 100644 index 000000000..45bae6084 --- /dev/null +++ b/docs/docs/whats-new/migrating-to-one-point-zero.mdx @@ -0,0 +1,125 @@ +# Migrating to Langflow 1.0: A Guide + +Langflow 1.0 is a significant update that brings many exciting changes and improvements to the platform. +This guide will walk you through the key improvements and help you migrate your existing projects to the new version. + +If you have any questions or need assistance during the migration process, please don't hesitate to reach out to us in our [Discord](https://discord.gg/wZSWQaukgJ) or [GitHub](https://github.com/logspace-ai/langflow/issues) community. + +We have a special channel in our Discord server dedicated to Langflow 1.0 migration, where you can ask questions, share your experiences, and get help from the community.
+ +## TLDR; + +- Inputs and Outputs of Components have changed +- We've surfaced steps that were previously run in the background +- Continued support for LangChain and new support for multiple frameworks +- Redesigned sidebar and customizable interaction panel +- New Native Categories and Components +- Improved user experience with Text and Record modes +- CustomComponent for all components +- Compatibility with previous versions using Runnable Executor +- Multiple flows in the canvas +- Improved component status +- Ability to connect Output components to any other Component +- Rename and edit component descriptions +- Pass tweaks and inputs in the API using Display Name +- Global Variables for Text Fields +- Experimental components like SubFlow and Flow as Tool +- Experimental State Management system with Notify and Listen components + +## Inputs and Outputs of Components + +Langflow 1.0 introduces the concept of Inputs and Outputs to flows, allowing a clear definition of the data flow between components. Discover how to use Inputs and Outputs to pass data between components and create more dynamic flows. + +[Learn more about Inputs and Outputs of Components](../migration/inputs-and-outputs) + +## To Compose or Not to Compose: the choice is yours + +Even though composition is still possible in Langflow 1.0, the new standard is getting data moving through the flow. This allows for more flexibility and control over the data flow in your projects. + +We will create guides on how to interweave LangChain components with our Core components soon. + +## Continued Support for LangChain and Multiple Frameworks + +Langflow 1.0 continues to support LangChain while also introducing support for multiple frameworks. This is another important boon that adding the paradigm of data flow brings to the table. Find out how to leverage the power of different frameworks in your projects.
+ +[Learn more about Supported Frameworks](../migration/supported-frameworks) + +## Sidebar Redesign and Customizable Interaction Panel + +We've expanded on the chat experience by creating a customizable interaction panel that allows you to design a panel that fits your needs and interact with it. The sidebar has also been redesigned to provide a more intuitive and user-friendly experience. Explore the new sidebar and interaction panel features to enhance your workflow. + +[Learn more about some of the UI updates](../migration/sidebar-and-interaction-panel) + +## New Native Categories and Components + +Langflow 1.0 introduces many new native categories, including Inputs, Outputs, Helpers, Experimental, Models, and more. Discover the new components available, such as Chat Input, Prompt, Files, API Request, and others. + +[Learn more about New Categories and Components](../migration/new-categories-and-components) + +## New Way of Using Langflow: Text and Record (and more to come) + +With the introduction of Text and Record types, connections between Components are more intuitive and easier to understand. This is the first step in a series of improvements to the way you interact with Langflow. Learn how to use Text and Record and how they help you build better flows. + +[Learn more about Text and Record](../migration/text-and-record) + +## CustomComponent for All Components + +Almost all components in Langflow 1.0 are now CustomComponents, allowing you to check and modify the code of each component. Discover how to leverage this feature to customize your components to your specific needs. + +[Learn more about CustomComponent](../migration/custom-component) + +## Compatibility with Previous Versions + +To use flows built in previous versions of Langflow, you can utilize the experimental component Runnable Executor along with an Input and Output. **We'd love your feedback on this**.
Learn how to adapt your existing flows to work seamlessly in the new version of Langflow. + +[Learn more about Compatibility with Previous Versions](../migration/compatibility) + +## Multiple Flows in the Canvas + +Langflow 1.0 allows you to have more than one flow in the canvas and run them separately. Discover how to create and manage multiple flows within a single project. + +[Learn more about Multiple Flows](../migration/multiple-flows) + +## Improved Component Status + +Each component now displays its status more clearly, allowing you to quickly identify any issues or errors. Explore how to use the new component status feature to troubleshoot and optimize your flows. + +[Learn more about Component Status](../migration/component-status-and-data-passing) + +## Connecting Output Components + +You can now connect Output components to any other component (that has a Text output), providing a better understanding of the data flow. Explore the possibilities of connecting Output components and how it enhances your flow's functionality. + +[Learn more about Connecting Output Components](../migration/connecting-output-components) + +## Renaming and Editing Component Descriptions + +Langflow 1.0 allows you to rename and edit the description of each component, making it easier to understand and interact with the flow. Learn how to customize your component names and descriptions for improved clarity. + +[Learn more about Renaming and Editing Components](../migration/renaming-and-editing-components) + +## Passing Tweaks and Inputs in the API + +Things got a whole lot easier. You can now pass tweaks and inputs in the API by referencing the Display Name of the component. Discover how to leverage this feature to dynamically control your flow's behavior. + +[Learn more about Passing Tweaks and Inputs](../migration/passing-tweaks-and-inputs) + +## Global Variables for Text Fields + +Global Variables can be used in any Text Field across your projects. 
Learn how to define and utilize Global Variables to streamline your workflow. + +[Learn more about Global Variables](../migration/global-variables) + +## Experimental Components + +Explore the experimental components available in Langflow 1.0, such as SubFlow, which allows you to load a flow as a component dynamically, and Flow as Tool, which enables you to use a flow as a tool for an Agent. + +[Learn more about Experimental Components](../migration/experimental-components) + +## Experimental State Management System + +We are experimenting with a State Management system for flows that allows components to trigger other components and pass messages between them using the Notify and Listen components. Discover how to leverage this system to create more dynamic and interactive flows. + +[Learn more about State Management](../migration/state-management) + +We hope this guide helps you navigate the changes and improvements in Langflow 1.0. If you have any questions or need further assistance, please don't hesitate to reach out to us in our [Discord](https://discord.gg/wZSWQaukgJ). 
\ No newline at end of file diff --git a/docs/docs/whats-new/simplification-standardization.mdx b/docs/docs/whats-new/simplification-standardization.mdx new file mode 100644 index 000000000..f7e3115bc --- /dev/null +++ b/docs/docs/whats-new/simplification-standardization.mdx @@ -0,0 +1 @@ +# Simplification Through Standardization \ No newline at end of file diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 430aebcb0..979953918 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -14,6 +14,7 @@ module.exports = { organizationName: "logspace-ai", projectName: "langflow", trailingSlash: false, + staticDirectories: ["static"], customFields: { mendableAnonKey: process.env.MENDABLE_ANON_KEY, }, @@ -42,6 +43,10 @@ module.exports = { path: "docs", // sidebarPath: 'sidebars.js', }, + gtag: { + trackingID: 'G-XHC7G628ZP', + anonymizeIP: true, + }, theme: { customCss: [ require.resolve("@code-hike/mdx/styles.css"), diff --git a/docs/package-lock.json b/docs/package-lock.json index 6742b89e7..c91a4ec06 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -10,11 +10,12 @@ "dependencies": { "@babel/preset-react": "^7.22.3", "@code-hike/mdx": "^0.9.0", - "@docusaurus/core": "3.0.1", - "@docusaurus/plugin-ideal-image": "^3.0.1", - "@docusaurus/preset-classic": "3.0.1", - "@docusaurus/theme-classic": "^3.0.1", - "@docusaurus/theme-search-algolia": "^3.0.1", + "@docusaurus/core": "^3.2.0", + "@docusaurus/plugin-google-gtag": "^3.2.0", + "@docusaurus/plugin-ideal-image": "^3.2.0", + "@docusaurus/preset-classic": "^3.2.0", + "@docusaurus/theme-classic": "^3.2.0", + "@docusaurus/theme-search-algolia": "^3.2.0", "@mdx-js/react": "^2.3.0", "@mendable/search": "^0.0.154", "@pbe/react-yandex-maps": "^1.2.4", @@ -41,7 +42,7 @@ "tailwindcss": "^3.3.2" }, "devDependencies": { - "@docusaurus/module-type-aliases": "2.4.1", + "@docusaurus/module-type-aliases": "^3.2.0", "css-loader": "^6.8.1", "docusaurus-node-polyfills": "^1.0.0", 
"node-sass": "^9.0.0", @@ -94,74 +95,74 @@ } }, "node_modules/@algolia/cache-browser-local-storage": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.22.0.tgz", - "integrity": "sha512-uZ1uZMLDZb4qODLfTSNHxSi4fH9RdrQf7DXEzW01dS8XK7QFtFh29N5NGKa9S+Yudf1vUMIF+/RiL4i/J0pWlQ==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.23.2.tgz", + "integrity": "sha512-PvRQdCmtiU22dw9ZcTJkrVKgNBVAxKgD0/cfiqyxhA5+PHzA2WDt6jOmZ9QASkeM2BpyzClJb/Wr1yt2/t78Kw==", "dependencies": { - "@algolia/cache-common": "4.22.0" + "@algolia/cache-common": "4.23.2" } }, "node_modules/@algolia/cache-common": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.22.0.tgz", - "integrity": "sha512-TPwUMlIGPN16eW67qamNQUmxNiGHg/WBqWcrOoCddhqNTqGDPVqmgfaM85LPbt24t3r1z0zEz/tdsmuq3Q6oaA==" + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.23.2.tgz", + "integrity": "sha512-OUK/6mqr6CQWxzl/QY0/mwhlGvS6fMtvEPyn/7AHUx96NjqDA4X4+Ju7aXFQKh+m3jW9VPB0B9xvEQgyAnRPNw==" }, "node_modules/@algolia/cache-in-memory": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.22.0.tgz", - "integrity": "sha512-kf4Cio9NpPjzp1+uXQgL4jsMDeck7MP89BYThSvXSjf2A6qV/0KeqQf90TL2ECS02ovLOBXkk98P7qVarM+zGA==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.23.2.tgz", + "integrity": "sha512-rfbi/SnhEa3MmlqQvgYz/9NNJ156NkU6xFxjbxBtLWnHbpj+qnlMoKd+amoiacHRITpajg6zYbLM9dnaD3Bczw==", "dependencies": { - "@algolia/cache-common": "4.22.0" + "@algolia/cache-common": "4.23.2" } }, "node_modules/@algolia/client-account": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.22.0.tgz", - "integrity": 
"sha512-Bjb5UXpWmJT+yGWiqAJL0prkENyEZTBzdC+N1vBuHjwIJcjLMjPB6j1hNBRbT12Lmwi55uzqeMIKS69w+0aPzA==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.23.2.tgz", + "integrity": "sha512-VbrOCLIN/5I7iIdskSoSw3uOUPF516k4SjDD4Qz3BFwa3of7D9A0lzBMAvQEJJEPHWdVraBJlGgdJq/ttmquJQ==", "dependencies": { - "@algolia/client-common": "4.22.0", - "@algolia/client-search": "4.22.0", - "@algolia/transporter": "4.22.0" + "@algolia/client-common": "4.23.2", + "@algolia/client-search": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/@algolia/client-analytics": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.22.0.tgz", - "integrity": "sha512-os2K+kHUcwwRa4ArFl5p/3YbF9lN3TLOPkbXXXxOvDpqFh62n9IRZuzfxpHxMPKAQS3Et1s0BkKavnNP02E9Hg==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.23.2.tgz", + "integrity": "sha512-lLj7irsAztGhMoEx/SwKd1cwLY6Daf1Q5f2AOsZacpppSvuFvuBrmkzT7pap1OD/OePjLKxicJS8wNA0+zKtuw==", "dependencies": { - "@algolia/client-common": "4.22.0", - "@algolia/client-search": "4.22.0", - "@algolia/requester-common": "4.22.0", - "@algolia/transporter": "4.22.0" + "@algolia/client-common": "4.23.2", + "@algolia/client-search": "4.23.2", + "@algolia/requester-common": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/@algolia/client-common": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.22.0.tgz", - "integrity": "sha512-BlbkF4qXVWuwTmYxVWvqtatCR3lzXwxx628p1wj1Q7QP2+LsTmGt1DiUYRuy9jG7iMsnlExby6kRMOOlbhv2Ag==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.23.2.tgz", + "integrity": "sha512-Q2K1FRJBern8kIfZ0EqPvUr3V29ICxCm/q42zInV+VJRjldAD9oTsMGwqUQ26GFMdFYmqkEfCbY4VGAiQhh22g==", "dependencies": { - "@algolia/requester-common": "4.22.0", - 
"@algolia/transporter": "4.22.0" + "@algolia/requester-common": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/@algolia/client-personalization": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.22.0.tgz", - "integrity": "sha512-pEOftCxeBdG5pL97WngOBi9w5Vxr5KCV2j2D+xMVZH8MuU/JX7CglDSDDb0ffQWYqcUN+40Ry+xtXEYaGXTGow==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.23.2.tgz", + "integrity": "sha512-vwPsgnCGhUcHhhQG5IM27z8q7dWrN9itjdvgA6uKf2e9r7vB+WXt4OocK0CeoYQt3OGEAExryzsB8DWqdMK5wg==", "dependencies": { - "@algolia/client-common": "4.22.0", - "@algolia/requester-common": "4.22.0", - "@algolia/transporter": "4.22.0" + "@algolia/client-common": "4.23.2", + "@algolia/requester-common": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/@algolia/client-search": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.22.0.tgz", - "integrity": "sha512-bn4qQiIdRPBGCwsNuuqB8rdHhGKKWIij9OqidM1UkQxnSG8yzxHdb7CujM30pvp5EnV7jTqDZRbxacbjYVW20Q==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.23.2.tgz", + "integrity": "sha512-CxSB29OVGSE7l/iyoHvamMonzq7Ev8lnk/OkzleODZ1iBcCs3JC/XgTIKzN/4RSTrJ9QybsnlrN/bYCGufo7qw==", "dependencies": { - "@algolia/client-common": "4.22.0", - "@algolia/requester-common": "4.22.0", - "@algolia/transporter": "4.22.0" + "@algolia/client-common": "4.23.2", + "@algolia/requester-common": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/@algolia/events": { @@ -170,47 +171,65 @@ "integrity": "sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==" }, "node_modules/@algolia/logger-common": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.22.0.tgz", - 
"integrity": "sha512-HMUQTID0ucxNCXs5d1eBJ5q/HuKg8rFVE/vOiLaM4Abfeq1YnTtGV3+rFEhOPWhRQxNDd+YHa4q864IMc0zHpQ==" + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.23.2.tgz", + "integrity": "sha512-jGM49Q7626cXZ7qRAWXn0jDlzvoA1FvN4rKTi1g0hxKsTTSReyYk0i1ADWjChDPl3Q+nSDhJuosM2bBUAay7xw==" }, "node_modules/@algolia/logger-console": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.22.0.tgz", - "integrity": "sha512-7JKb6hgcY64H7CRm3u6DRAiiEVXMvCJV5gRE672QFOUgDxo4aiDpfU61g6Uzy8NKjlEzHMmgG4e2fklELmPXhQ==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.23.2.tgz", + "integrity": "sha512-oo+lnxxEmlhTBTFZ3fGz1O8PJ+G+8FiAoMY2Qo3Q4w23xocQev6KqDTA1JQAGPDxAewNA2VBwWOsVXeXFjrI/Q==", "dependencies": { - "@algolia/logger-common": "4.22.0" + "@algolia/logger-common": "4.23.2" + } + }, + "node_modules/@algolia/recommend": { + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.23.2.tgz", + "integrity": "sha512-Q75CjnzRCDzgIlgWfPnkLtrfF4t82JCirhalXkSSwe/c1GH5pWh4xUyDOR3KTMo+YxxX3zTlrL/FjHmUJEWEcg==", + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.2", + "@algolia/cache-common": "4.23.2", + "@algolia/cache-in-memory": "4.23.2", + "@algolia/client-common": "4.23.2", + "@algolia/client-search": "4.23.2", + "@algolia/logger-common": "4.23.2", + "@algolia/logger-console": "4.23.2", + "@algolia/requester-browser-xhr": "4.23.2", + "@algolia/requester-common": "4.23.2", + "@algolia/requester-node-http": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/@algolia/requester-browser-xhr": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.22.0.tgz", - "integrity": "sha512-BHfv1h7P9/SyvcDJDaRuIwDu2yrDLlXlYmjvaLZTtPw6Ok/ZVhBR55JqW832XN/Fsl6k3LjdkYHHR7xnsa5Wvg==", + "version": 
"4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.23.2.tgz", + "integrity": "sha512-TO9wLlp8+rvW9LnIfyHsu8mNAMYrqNdQ0oLF6eTWFxXfxG3k8F/Bh7nFYGk2rFAYty4Fw4XUtrv/YjeNDtM5og==", "dependencies": { - "@algolia/requester-common": "4.22.0" + "@algolia/requester-common": "4.23.2" } }, "node_modules/@algolia/requester-common": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.22.0.tgz", - "integrity": "sha512-Y9cEH/cKjIIZgzvI1aI0ARdtR/xRrOR13g5psCxkdhpgRN0Vcorx+zePhmAa4jdQNqexpxtkUdcKYugBzMZJgQ==" + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.23.2.tgz", + "integrity": "sha512-3EfpBS0Hri0lGDB5H/BocLt7Vkop0bTTLVUBB844HH6tVycwShmsV6bDR7yXbQvFP1uNpgePRD3cdBCjeHmk6Q==" }, "node_modules/@algolia/requester-node-http": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.22.0.tgz", - "integrity": "sha512-8xHoGpxVhz3u2MYIieHIB6MsnX+vfd5PS4REgglejJ6lPigftRhTdBCToe6zbwq4p0anZXjjPDvNWMlgK2+xYA==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.23.2.tgz", + "integrity": "sha512-SVzgkZM/malo+2SB0NWDXpnT7nO5IZwuDTaaH6SjLeOHcya1o56LSWXk+3F3rNLz2GVH+I/rpYKiqmHhSOjerw==", "dependencies": { - "@algolia/requester-common": "4.22.0" + "@algolia/requester-common": "4.23.2" } }, "node_modules/@algolia/transporter": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.22.0.tgz", - "integrity": "sha512-ieO1k8x2o77GNvOoC+vAkFKppydQSVfbjM3YrSjLmgywiBejPTvU1R1nEvG59JIIUvtSLrZsLGPkd6vL14zopA==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.23.2.tgz", + "integrity": "sha512-GY3aGKBy+8AK4vZh8sfkatDciDVKad5rTY2S10Aefyjh7e7UGBP4zigf42qVXwU8VOPwi7l/L7OACGMOFcjB0Q==", 
"dependencies": { - "@algolia/cache-common": "4.22.0", - "@algolia/logger-common": "4.22.0", - "@algolia/requester-common": "4.22.0" + "@algolia/cache-common": "4.23.2", + "@algolia/logger-common": "4.23.2", + "@algolia/requester-common": "4.23.2" } }, "node_modules/@alloc/quick-lru": { @@ -2033,18 +2052,18 @@ } }, "node_modules/@docsearch/css": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.5.2.tgz", - "integrity": "sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==" + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz", + "integrity": "sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==" }, "node_modules/@docsearch/react": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.5.2.tgz", - "integrity": "sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.6.0.tgz", + "integrity": "sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==", "dependencies": { "@algolia/autocomplete-core": "1.9.3", "@algolia/autocomplete-preset-algolia": "1.9.3", - "@docsearch/css": "3.5.2", + "@docsearch/css": "3.6.0", "algoliasearch": "^4.19.1" }, "peerDependencies": { @@ -2069,9 +2088,9 @@ } }, "node_modules/@docusaurus/core": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.0.1.tgz", - "integrity": "sha512-CXrLpOnW+dJdSv8M5FAJ3JBwXtL6mhUWxFA8aS0ozK6jBG/wgxERk5uvH28fCeFxOGbAT9v1e9dOMo1X2IEVhQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/core/-/core-3.2.0.tgz", + "integrity": "sha512-WTO6vW4404nhTmK9NL+95nd13I1JveFwZ8iOBYxb4xt+N2S3KzY+mm+1YtWw2vV37FbYfH+w+KrlrRaWuy5Hzw==", "dependencies": { "@babel/core": "^7.23.3", 
"@babel/generator": "^7.23.3", @@ -2083,14 +2102,13 @@ "@babel/runtime": "^7.22.6", "@babel/runtime-corejs3": "^7.22.6", "@babel/traverse": "^7.22.8", - "@docusaurus/cssnano-preset": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/mdx-loader": "3.0.1", + "@docusaurus/cssnano-preset": "3.2.0", + "@docusaurus/logger": "3.2.0", + "@docusaurus/mdx-loader": "3.2.0", "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-common": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", - "@slorber/static-site-generator-webpack-plugin": "^4.0.7", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "@svgr/webpack": "^6.5.1", "autoprefixer": "^10.4.14", "babel-loader": "^9.1.3", @@ -2111,6 +2129,7 @@ "detect-port": "^1.5.1", "escape-html": "^1.0.3", "eta": "^2.2.0", + "eval": "^0.1.8", "file-loader": "^6.2.0", "fs-extra": "^11.1.1", "html-minifier-terser": "^7.2.0", @@ -2119,6 +2138,7 @@ "leven": "^3.1.0", "lodash": "^4.17.21", "mini-css-extract-plugin": "^2.7.6", + "p-map": "^4.0.0", "postcss": "^8.4.26", "postcss-loader": "^7.3.3", "prompts": "^2.4.2", @@ -2249,9 +2269,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/@docusaurus/cssnano-preset": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.0.1.tgz", - "integrity": "sha512-wjuXzkHMW+ig4BD6Ya1Yevx9UJadO4smNZCEljqBoQfIQrQskTswBs7lZ8InHP7mCt273a/y/rm36EZhqJhknQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-3.2.0.tgz", + "integrity": "sha512-H88RXGUia7r/VF3XfyoA4kbwgpUZcKsObF6VvwBOP91EdArTf6lnHbJ/x8Ca79KS/zf98qaWyBGzW+5ez58Iyw==", "dependencies": { "cssnano-preset-advanced": "^5.3.10", "postcss": "^8.4.26", @@ -2263,9 +2283,9 @@ } }, "node_modules/@docusaurus/logger": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/@docusaurus/logger/-/logger-3.0.1.tgz", - "integrity": "sha512-I5L6Nk8OJzkVA91O2uftmo71LBSxe1vmOn9AMR6JRCzYeEBrqneWMH02AqMvjJ2NpMiviO+t0CyPjyYV7nxCWQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-3.2.0.tgz", + "integrity": "sha512-Z1R1NcOGXZ8CkIJSvjvyxnuDDSlx/+1xlh20iVTw1DZRjonFmI3T3tTgk40YpXyWUYQpIgAoqqPMpuseMMdgRQ==", "dependencies": { "chalk": "^4.1.2", "tslib": "^2.6.0" @@ -2339,11 +2359,11 @@ } }, "node_modules/@docusaurus/lqip-loader": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/lqip-loader/-/lqip-loader-3.0.1.tgz", - "integrity": "sha512-hFSu8ltYo0ZnWBWmjMhSprOr6nNKG01YdMDxH/hahBfyaNDCkZU4o7mQNgUW845lvYdp6bhjyW31WJwBjOnLqw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/lqip-loader/-/lqip-loader-3.2.0.tgz", + "integrity": "sha512-lmYT3fslGH3ibdXySSUgtd4E3B8sQ7xizlNhmTj5eiqSQdiRiD15rVH73RohK8h+yrbu2QUDHTDAH6j7O5e2Gg==", "dependencies": { - "@docusaurus/logger": "3.0.1", + "@docusaurus/logger": "3.2.0", "file-loader": "^6.2.0", "lodash": "^4.17.21", "sharp": "^0.32.3", @@ -2354,15 +2374,13 @@ } }, "node_modules/@docusaurus/mdx-loader": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.0.1.tgz", - "integrity": "sha512-ldnTmvnvlrONUq45oKESrpy+lXtbnTcTsFkOTIDswe5xx5iWJjt6eSa0f99ZaWlnm24mlojcIGoUWNCS53qVlQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-3.2.0.tgz", + "integrity": "sha512-JtkI5o6R/rJSr1Y23cHKz085aBJCvJw3AYHihJ7r+mBX+O8EuQIynG0e6/XpbSCpr7Ino0U50UtxaXcEbFwg9Q==", "dependencies": { - "@babel/parser": "^7.22.7", - "@babel/traverse": "^7.22.8", - "@docusaurus/logger": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/logger": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "@mdx-js/mdx": "^3.0.0", 
"@slorber/remark-comment": "^1.0.0", "escape-html": "^1.0.3", @@ -2394,13 +2412,12 @@ } }, "node_modules/@docusaurus/module-type-aliases": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.4.1.tgz", - "integrity": "sha512-gLBuIFM8Dp2XOCWffUDSjtxY7jQgKvYujt7Mx5s4FCTfoL5dN1EVbnrn+O2Wvh8b0a77D57qoIDY7ghgmatR1A==", - "dev": true, + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.2.0.tgz", + "integrity": "sha512-jRSp9YkvBwwNz6Xgy0RJPsnie+Ebb//gy7GdbkJ2pW2gvvlYKGib2+jSF0pfIzvyZLulfCynS1KQdvDKdSl8zQ==", "dependencies": { "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/types": "2.4.1", + "@docusaurus/types": "3.2.0", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", @@ -2413,38 +2430,18 @@ "react-dom": "*" } }, - "node_modules/@docusaurus/module-type-aliases/node_modules/@docusaurus/types": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-2.4.1.tgz", - "integrity": "sha512-0R+cbhpMkhbRXX138UOc/2XZFF8hiZa6ooZAEEJFp5scytzCw4tC1gChMFXrpa3d2tYE6AX8IrOEpSonLmfQuQ==", - "dev": true, - "dependencies": { - "@types/history": "^4.7.11", - "@types/react": "*", - "commander": "^5.1.0", - "joi": "^17.6.0", - "react-helmet-async": "^1.3.0", - "utility-types": "^3.10.0", - "webpack": "^5.73.0", - "webpack-merge": "^5.8.0" - }, - "peerDependencies": { - "react": "^16.8.4 || ^17.0.0", - "react-dom": "^16.8.4 || ^17.0.0" - } - }, "node_modules/@docusaurus/plugin-content-blog": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.0.1.tgz", - "integrity": "sha512-cLOvtvAyaMQFLI8vm4j26svg3ktxMPSXpuUJ7EERKoGbfpJSsgtowNHcRsaBVmfuCsRSk1HZ/yHBsUkTmHFEsg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.2.0.tgz", + "integrity": 
"sha512-MABqwjSicyHmYEfQueMthPCz18JkVxhK3EGhXTSRWwReAZ0UTuw9pG6+Wo+uXAugDaIcJH28rVZSwTDINPm2bw==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/mdx-loader": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-common": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/logger": "3.2.0", + "@docusaurus/mdx-loader": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "cheerio": "^1.0.0-rc.12", "feed": "^4.2.2", "fs-extra": "^11.1.1", @@ -2465,17 +2462,18 @@ } }, "node_modules/@docusaurus/plugin-content-docs": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.0.1.tgz", - "integrity": "sha512-dRfAOA5Ivo+sdzzJGXEu33yAtvGg8dlZkvt/NEJ7nwi1F2j4LEdsxtfX2GKeETB2fP6XoGNSQnFXqa2NYGrHFg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.2.0.tgz", + "integrity": "sha512-uuqhahmsBnirxOz+SXksnWt7+wc+iN4ntxNRH48BUgo7QRNLATWjHCgI8t6zrMJxK4o+QL9DhLaPDlFHs91B3Q==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/mdx-loader": "3.0.1", - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/logger": "3.2.0", + "@docusaurus/mdx-loader": "3.2.0", + "@docusaurus/module-type-aliases": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "@types/react-router-config": "^5.0.7", "combine-promises": "^1.1.0", "fs-extra": "^11.1.1", @@ -2493,35 +2491,16 @@ "react-dom": "^18.0.0" } }, - 
"node_modules/@docusaurus/plugin-content-docs/node_modules/@docusaurus/module-type-aliases": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.0.1.tgz", - "integrity": "sha512-DEHpeqUDsLynl3AhQQiO7AbC7/z/lBra34jTcdYuvp9eGm01pfH1wTVq8YqWZq6Jyx0BgcVl/VJqtE9StRd9Ag==", - "dependencies": { - "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/types": "3.0.1", - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router-config": "*", - "@types/react-router-dom": "*", - "react-helmet-async": "*", - "react-loadable": "npm:@docusaurus/react-loadable@5.5.2" - }, - "peerDependencies": { - "react": "*", - "react-dom": "*" - } - }, "node_modules/@docusaurus/plugin-content-pages": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.0.1.tgz", - "integrity": "sha512-oP7PoYizKAXyEttcvVzfX3OoBIXEmXTMzCdfmC4oSwjG4SPcJsRge3mmI6O8jcZBgUPjIzXD21bVGWEE1iu8gg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.2.0.tgz", + "integrity": "sha512-4ofAN7JDsdb4tODO9OIrizWY5DmEJXr0eu+UDIkLqGP+gXXTahJZv8h2mlxO+lPXGXRCVBOfA14OG1hOYJVPwA==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/mdx-loader": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/mdx-loader": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "fs-extra": "^11.1.1", "tslib": "^2.6.0", "webpack": "^5.88.1" @@ -2535,13 +2514,13 @@ } }, "node_modules/@docusaurus/plugin-debug": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.0.1.tgz", - "integrity": "sha512-09dxZMdATky4qdsZGzhzlUvvC+ilQ2hKbYF+wez+cM2mGo4qHbv8+qKXqxq0CQZyimwlAOWQLoSozIXU0g0i7g==", + 
"version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-3.2.0.tgz", + "integrity": "sha512-p6WxtO5XZGz66y6QNQtCJwBefq4S6/w75XaXVvH1/2P9uaijvF7R+Cm2EWQZ5WsvA5wl//DFWblyDHRyVC207Q==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils": "3.2.0", "fs-extra": "^11.1.1", "react-json-view-lite": "^1.2.0", "tslib": "^2.6.0" @@ -2555,13 +2534,13 @@ } }, "node_modules/@docusaurus/plugin-google-analytics": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.0.1.tgz", - "integrity": "sha512-jwseSz1E+g9rXQwDdr0ZdYNjn8leZBnKPjjQhMBEiwDoenL3JYFcNW0+p0sWoVF/f2z5t7HkKA+cYObrUh18gg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.2.0.tgz", + "integrity": "sha512-//TepJTEyAZSvBwHKEbXHu9xT/VkK3wUil2ZakKvQZYfUC01uWn6A1E3toa8R7WhCy1xPUeIukqmJy1Clg8njQ==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "tslib": "^2.6.0" }, "engines": { @@ -2573,13 +2552,13 @@ } }, "node_modules/@docusaurus/plugin-google-gtag": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.0.1.tgz", - "integrity": "sha512-UFTDvXniAWrajsulKUJ1DB6qplui1BlKLQZjX4F7qS/qfJ+qkKqSkhJ/F4VuGQ2JYeZstYb+KaUzUzvaPK1aRQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.2.0.tgz", + "integrity": "sha512-3s6zxlaMMb87MW2Rxy6EnSRDs0WDEQPuHilZZH402C8kOrUnIwlhlfjWZ4ZyLDziGl/Eec/DvD0PVqj0qHRomA==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/types": 
"3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "@types/gtag.js": "^0.0.12", "tslib": "^2.6.0" }, @@ -2592,13 +2571,13 @@ } }, "node_modules/@docusaurus/plugin-google-tag-manager": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.0.1.tgz", - "integrity": "sha512-IPFvuz83aFuheZcWpTlAdiiX1RqWIHM+OH8wS66JgwAKOiQMR3+nLywGjkLV4bp52x7nCnwhNk1rE85Cpy/CIw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.2.0.tgz", + "integrity": "sha512-rAKtsJ11vPHA7dTAqWCgyIy7AyFRF/lpI77Zd/4HKgqcIvIayVBvL3QtelhUazfYTLTH6ls6kQ9wjMcIFxRiGg==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "tslib": "^2.6.0" }, "engines": { @@ -2610,16 +2589,16 @@ } }, "node_modules/@docusaurus/plugin-ideal-image": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-3.0.1.tgz", - "integrity": "sha512-IvAUpEIz6v1/fVz6UTdQY12pYIE5geNFtsuKpsULpMaotwYf3Gs7acXjQog4qquKkc65yV5zuvMj8BZMHEwLyQ==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-ideal-image/-/plugin-ideal-image-3.2.0.tgz", + "integrity": "sha512-FPvNyOmSRBnSUQkiti7098N9W950EC4z7hXRn5ZwaG7Q+JGLdXC7sYWyTsjR5KZbEGolJWG1uyi+bG2vd1zDsw==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/lqip-loader": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/lqip-loader": "3.2.0", "@docusaurus/responsive-loader": "^1.7.0", - "@docusaurus/theme-translations": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/theme-translations": "3.2.0", + 
"@docusaurus/types": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "@slorber/react-ideal-image": "^0.0.12", "react-waypoint": "^10.3.0", "sharp": "^0.32.3", @@ -2641,16 +2620,16 @@ } }, "node_modules/@docusaurus/plugin-sitemap": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.0.1.tgz", - "integrity": "sha512-xARiWnjtVvoEniZudlCq5T9ifnhCu/GAZ5nA7XgyLfPcNpHQa241HZdsTlLtVcecEVVdllevBKOp7qknBBaMGw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.2.0.tgz", + "integrity": "sha512-gnWDFt6MStjLkdtt63Lzc+14EPSd8B6mzJGJp9GQMvWDUoMAUijUqpVIHYQq+DPMcI4PJZ5I2nsl5XFf1vOldA==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-common": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/logger": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "fs-extra": "^11.1.1", "sitemap": "^7.1.1", "tslib": "^2.6.0" @@ -2664,23 +2643,23 @@ } }, "node_modules/@docusaurus/preset-classic": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.0.1.tgz", - "integrity": "sha512-il9m9xZKKjoXn6h0cRcdnt6wce0Pv1y5t4xk2Wx7zBGhKG1idu4IFHtikHlD0QPuZ9fizpXspXcTzjL5FXc1Gw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-3.2.0.tgz", + "integrity": "sha512-t7tXyk8kUgT7hUqEOgSJnPs+Foem9ucuan/a9QVYaVFCDjp92Sb2FpCY8bVasAokYCjodYe2LfpAoSCj5YDYWg==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/plugin-content-blog": "3.0.1", - "@docusaurus/plugin-content-docs": "3.0.1", - "@docusaurus/plugin-content-pages": "3.0.1", - "@docusaurus/plugin-debug": "3.0.1", - 
"@docusaurus/plugin-google-analytics": "3.0.1", - "@docusaurus/plugin-google-gtag": "3.0.1", - "@docusaurus/plugin-google-tag-manager": "3.0.1", - "@docusaurus/plugin-sitemap": "3.0.1", - "@docusaurus/theme-classic": "3.0.1", - "@docusaurus/theme-common": "3.0.1", - "@docusaurus/theme-search-algolia": "3.0.1", - "@docusaurus/types": "3.0.1" + "@docusaurus/core": "3.2.0", + "@docusaurus/plugin-content-blog": "3.2.0", + "@docusaurus/plugin-content-docs": "3.2.0", + "@docusaurus/plugin-content-pages": "3.2.0", + "@docusaurus/plugin-debug": "3.2.0", + "@docusaurus/plugin-google-analytics": "3.2.0", + "@docusaurus/plugin-google-gtag": "3.2.0", + "@docusaurus/plugin-google-tag-manager": "3.2.0", + "@docusaurus/plugin-sitemap": "3.2.0", + "@docusaurus/theme-classic": "3.2.0", + "@docusaurus/theme-common": "3.2.0", + "@docusaurus/theme-search-algolia": "3.2.0", + "@docusaurus/types": "3.2.0" }, "engines": { "node": ">=18.0" @@ -2726,22 +2705,22 @@ } }, "node_modules/@docusaurus/theme-classic": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.0.1.tgz", - "integrity": "sha512-XD1FRXaJiDlmYaiHHdm27PNhhPboUah9rqIH0lMpBt5kYtsGjJzhqa27KuZvHLzOP2OEpqd2+GZ5b6YPq7Q05Q==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-3.2.0.tgz", + "integrity": "sha512-4oSO5BQOJ5ja7WYdL6jK1n4J96tp+VJHamdwao6Ea252sA3W3vvR0otTflG4p4XVjNZH6hlPQoi5lKW0HeRgfQ==", "dependencies": { - "@docusaurus/core": "3.0.1", - "@docusaurus/mdx-loader": "3.0.1", - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/plugin-content-blog": "3.0.1", - "@docusaurus/plugin-content-docs": "3.0.1", - "@docusaurus/plugin-content-pages": "3.0.1", - "@docusaurus/theme-common": "3.0.1", - "@docusaurus/theme-translations": "3.0.1", - "@docusaurus/types": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-common": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", 
+ "@docusaurus/mdx-loader": "3.2.0", + "@docusaurus/module-type-aliases": "3.2.0", + "@docusaurus/plugin-content-blog": "3.2.0", + "@docusaurus/plugin-content-docs": "3.2.0", + "@docusaurus/plugin-content-pages": "3.2.0", + "@docusaurus/theme-common": "3.2.0", + "@docusaurus/theme-translations": "3.2.0", + "@docusaurus/types": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", "copy-text-to-clipboard": "^3.2.0", @@ -2764,29 +2743,10 @@ "react-dom": "^18.0.0" } }, - "node_modules/@docusaurus/theme-classic/node_modules/@docusaurus/module-type-aliases": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.0.1.tgz", - "integrity": "sha512-DEHpeqUDsLynl3AhQQiO7AbC7/z/lBra34jTcdYuvp9eGm01pfH1wTVq8YqWZq6Jyx0BgcVl/VJqtE9StRd9Ag==", - "dependencies": { - "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/types": "3.0.1", - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router-config": "*", - "@types/react-router-dom": "*", - "react-helmet-async": "*", - "react-loadable": "npm:@docusaurus/react-loadable@5.5.2" - }, - "peerDependencies": { - "react": "*", - "react-dom": "*" - } - }, "node_modules/@docusaurus/theme-classic/node_modules/@mdx-js/react": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.0.0.tgz", - "integrity": "sha512-nDctevR9KyYFyV+m+/+S4cpzCWHqj+iHDHq3QrsWezcC+B17uZdIWgCguESUkwFhM3n/56KxWVE3V6EokrmONQ==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.0.1.tgz", + "integrity": "sha512-9ZrPIU4MGf6et1m1ov3zKf+q9+deetI51zprKB1D/z3NOb+rUxxtEl3mCjW5wTGh6VhRdwPueh1oRzi6ezkA8A==", "dependencies": { "@types/mdx": "^2.0.0" }, @@ -2800,9 +2760,9 @@ } }, "node_modules/@docusaurus/theme-classic/node_modules/clsx": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz", - "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", + "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", "engines": { "node": ">=6" } @@ -2820,17 +2780,17 @@ } }, "node_modules/@docusaurus/theme-common": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.0.1.tgz", - "integrity": "sha512-cr9TOWXuIOL0PUfuXv6L5lPlTgaphKP+22NdVBOYah5jSq5XAAulJTjfe+IfLsEG4L7lJttLbhW7LXDFSAI7Ag==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-3.2.0.tgz", + "integrity": "sha512-sFbw9XviNJJ+760kAcZCQMQ3jkNIznGqa6MQ70E5BnbP+ja36kGgPOfjcsvAcNey1H1Rkhh3p2Mhf4HVLdKVVw==", "dependencies": { - "@docusaurus/mdx-loader": "3.0.1", - "@docusaurus/module-type-aliases": "3.0.1", - "@docusaurus/plugin-content-blog": "3.0.1", - "@docusaurus/plugin-content-docs": "3.0.1", - "@docusaurus/plugin-content-pages": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-common": "3.0.1", + "@docusaurus/mdx-loader": "3.2.0", + "@docusaurus/module-type-aliases": "3.2.0", + "@docusaurus/plugin-content-blog": "3.2.0", + "@docusaurus/plugin-content-docs": "3.2.0", + "@docusaurus/plugin-content-pages": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", "@types/history": "^4.7.11", "@types/react": "*", "@types/react-router-config": "*", @@ -2848,29 +2808,10 @@ "react-dom": "^18.0.0" } }, - "node_modules/@docusaurus/theme-common/node_modules/@docusaurus/module-type-aliases": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-3.0.1.tgz", - "integrity": 
"sha512-DEHpeqUDsLynl3AhQQiO7AbC7/z/lBra34jTcdYuvp9eGm01pfH1wTVq8YqWZq6Jyx0BgcVl/VJqtE9StRd9Ag==", - "dependencies": { - "@docusaurus/react-loadable": "5.5.2", - "@docusaurus/types": "3.0.1", - "@types/history": "^4.7.11", - "@types/react": "*", - "@types/react-router-config": "*", - "@types/react-router-dom": "*", - "react-helmet-async": "*", - "react-loadable": "npm:@docusaurus/react-loadable@5.5.2" - }, - "peerDependencies": { - "react": "*", - "react-dom": "*" - } - }, "node_modules/@docusaurus/theme-common/node_modules/clsx": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz", - "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", + "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", "engines": { "node": ">=6" } @@ -2888,18 +2829,18 @@ } }, "node_modules/@docusaurus/theme-search-algolia": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.0.1.tgz", - "integrity": "sha512-DDiPc0/xmKSEdwFkXNf1/vH1SzJPzuJBar8kMcBbDAZk/SAmo/4lf6GU2drou4Ae60lN2waix+jYWTWcJRahSA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.2.0.tgz", + "integrity": "sha512-PgvF4qHoqJp8+GfqClUbTF/zYNOsz4De251IuzXon7+7FAXwvb2qmYtA2nEwyMbB7faKOz33Pxzv+y+153KS/g==", "dependencies": { "@docsearch/react": "^3.5.2", - "@docusaurus/core": "3.0.1", - "@docusaurus/logger": "3.0.1", - "@docusaurus/plugin-content-docs": "3.0.1", - "@docusaurus/theme-common": "3.0.1", - "@docusaurus/theme-translations": "3.0.1", - "@docusaurus/utils": "3.0.1", - "@docusaurus/utils-validation": "3.0.1", + "@docusaurus/core": "3.2.0", + "@docusaurus/logger": "3.2.0", + "@docusaurus/plugin-content-docs": "3.2.0", + 
"@docusaurus/theme-common": "3.2.0", + "@docusaurus/theme-translations": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-validation": "3.2.0", "algoliasearch": "^4.18.0", "algoliasearch-helper": "^3.13.3", "clsx": "^2.0.0", @@ -2918,17 +2859,17 @@ } }, "node_modules/@docusaurus/theme-search-algolia/node_modules/clsx": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz", - "integrity": "sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", + "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", "engines": { "node": ">=6" } }, "node_modules/@docusaurus/theme-translations": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.0.1.tgz", - "integrity": "sha512-6UrbpzCTN6NIJnAtZ6Ne9492vmPVX+7Fsz4kmp+yor3KQwA1+MCzQP7ItDNkP38UmVLnvB/cYk/IvehCUqS3dg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-3.2.0.tgz", + "integrity": "sha512-VXzZJBuyVEmwUYyud+7IgJQEBRM6R2u/s10Rp3DOP19CBQxeKgHYTKkKhFtDeKMHDassb665kjgOi0YlJfUT6w==", "dependencies": { "fs-extra": "^11.1.1", "tslib": "^2.6.0" @@ -2938,10 +2879,11 @@ } }, "node_modules/@docusaurus/types": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.0.1.tgz", - "integrity": "sha512-plyX2iU1tcUsF46uQ01pAd4JhexR7n0iiQ5MSnBFX6M6NSJgDYdru/i1/YNPKOnQHBoXGLHv0dNT6OAlDWNjrg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/types/-/types-3.2.0.tgz", + "integrity": "sha512-uG3FfTkkkbZIPPNYx6xRfZHKeGyRd/inIT1cqvYt1FobFLd+7WhRXrSBqwJ9JajJjEAjNioRMVFgGofGf/Wdww==", "dependencies": { + "@mdx-js/mdx": "^3.0.0", "@types/history": "^4.7.11", "@types/react": "*", "commander": "^5.1.0", @@ -2957,11 +2899,12 
@@ } }, "node_modules/@docusaurus/utils": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.0.1.tgz", - "integrity": "sha512-TwZ33Am0q4IIbvjhUOs+zpjtD/mXNmLmEgeTGuRq01QzulLHuPhaBTTAC/DHu6kFx3wDgmgpAlaRuCHfTcXv8g==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-3.2.0.tgz", + "integrity": "sha512-3rgrE7iL60yV2JQivlcoxUNNTK2APmn+OHLUmTvX2pueIM8DEOCEFHpJO4MiWjFO7V/Wq3iA/W1M03JnjdugVw==", "dependencies": { - "@docusaurus/logger": "3.0.1", + "@docusaurus/logger": "3.2.0", + "@docusaurus/utils-common": "3.2.0", "@svgr/webpack": "^6.5.1", "escape-string-regexp": "^4.0.0", "file-loader": "^6.2.0", @@ -2973,6 +2916,7 @@ "js-yaml": "^4.1.0", "lodash": "^4.17.21", "micromatch": "^4.0.5", + "prompts": "^2.4.2", "resolve-pathname": "^3.0.0", "shelljs": "^0.8.5", "tslib": "^2.6.0", @@ -2992,9 +2936,9 @@ } }, "node_modules/@docusaurus/utils-common": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.0.1.tgz", - "integrity": "sha512-W0AxD6w6T8g6bNro8nBRWf7PeZ/nn7geEWM335qHU2DDDjHuV4UZjgUGP1AQsdcSikPrlIqTJJbKzer1lRSlIg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-3.2.0.tgz", + "integrity": "sha512-WEQT5L2lT/tBQgDRgeZQAIi9YJBrwEILb1BuObQn1St3T/4K1gx5fWwOT8qdLOov296XLd1FQg9Ywu27aE9svw==", "dependencies": { "tslib": "^2.6.0" }, @@ -3011,12 +2955,13 @@ } }, "node_modules/@docusaurus/utils-validation": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.0.1.tgz", - "integrity": "sha512-ujTnqSfyGQ7/4iZdB4RRuHKY/Nwm58IIb+41s5tCXOv/MBU2wGAjOHq3U+AEyJ8aKQcHbxvTKJaRchNHYUVUQg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-3.2.0.tgz", + "integrity": "sha512-rCzMTqwNrBrEOyU8EaD1fYWdig4TDhfj+YLqB8DY68VUAqSIgbY+yshpqFKB0bznFYNBJbn0bGpvVuImQOa/vA==", 
"dependencies": { - "@docusaurus/logger": "3.0.1", - "@docusaurus/utils": "3.0.1", + "@docusaurus/logger": "3.2.0", + "@docusaurus/utils": "3.2.0", + "@docusaurus/utils-common": "3.2.0", "joi": "^17.9.2", "js-yaml": "^4.1.0", "tslib": "^2.6.0" @@ -3234,9 +3179,9 @@ "integrity": "sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A==" }, "node_modules/@mdx-js/mdx": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.0.0.tgz", - "integrity": "sha512-Icm0TBKBLYqroYbNW3BPnzMGn+7mwpQOK310aZ7+fkCtiU3aqv2cdcX+nd0Ydo3wI5Rx8bX2Z2QmGb/XcAClCw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.0.1.tgz", + "integrity": "sha512-eIQ4QTrOWyL3LWEe/bu6Taqzq2HQvHcyTMaOrI95P2/LmJE7AsfPfgJGuFLPVqBUE1BC1rik3VIhU+s9u72arA==", "dependencies": { "@types/estree": "^1.0.0", "@types/estree-jsx": "^1.0.0", @@ -3480,9 +3425,9 @@ } }, "node_modules/@sideway/address": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz", - "integrity": "sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.5.tgz", + "integrity": "sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==", "dependencies": { "@hapi/hoek": "^9.0.0" } @@ -3537,19 +3482,6 @@ "micromark-util-symbol": "^1.0.1" } }, - "node_modules/@slorber/static-site-generator-webpack-plugin": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz", - "integrity": "sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA==", - "dependencies": { - "eval": "^0.1.8", - "p-map": "^4.0.0", - "webpack-sources": "^3.2.2" - }, - "engines": { - "node": ">=14" - } - }, 
"node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "6.5.1", "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.5.1.tgz", @@ -3930,9 +3862,9 @@ "integrity": "sha512-YQV9bUsemkzG81Ea295/nF/5GijnD2Af7QhEofh7xu+kvCN6RdodgNwwGWXB5GMI3NoyvQo0odNctoH/qLMIpg==" }, "node_modules/@types/hast": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.3.tgz", - "integrity": "sha512-2fYGlaDy/qyLlhidX42wAH0KBi2TCjKMH8CHmBXgRlJ3Y+OXTiqsPQ6IWarZKwF1JoUcAJdPogv1d4b0COTpmQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", "dependencies": { "@types/unist": "*" } @@ -4505,30 +4437,31 @@ } }, "node_modules/algoliasearch": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.22.0.tgz", - "integrity": "sha512-gfceltjkwh7PxXwtkS8KVvdfK+TSNQAWUeNSxf4dA29qW5tf2EGwa8jkJujlT9jLm17cixMVoGNc+GJFO1Mxhg==", + "version": "4.23.2", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.23.2.tgz", + "integrity": "sha512-8aCl055IsokLuPU8BzLjwzXjb7ty9TPcUFFOk0pYOwsE5DMVhE3kwCMFtsCFKcnoPZK7oObm+H5mbnSO/9ioxQ==", "dependencies": { - "@algolia/cache-browser-local-storage": "4.22.0", - "@algolia/cache-common": "4.22.0", - "@algolia/cache-in-memory": "4.22.0", - "@algolia/client-account": "4.22.0", - "@algolia/client-analytics": "4.22.0", - "@algolia/client-common": "4.22.0", - "@algolia/client-personalization": "4.22.0", - "@algolia/client-search": "4.22.0", - "@algolia/logger-common": "4.22.0", - "@algolia/logger-console": "4.22.0", - "@algolia/requester-browser-xhr": "4.22.0", - "@algolia/requester-common": "4.22.0", - "@algolia/requester-node-http": "4.22.0", - "@algolia/transporter": "4.22.0" + "@algolia/cache-browser-local-storage": "4.23.2", + "@algolia/cache-common": 
"4.23.2", + "@algolia/cache-in-memory": "4.23.2", + "@algolia/client-account": "4.23.2", + "@algolia/client-analytics": "4.23.2", + "@algolia/client-common": "4.23.2", + "@algolia/client-personalization": "4.23.2", + "@algolia/client-search": "4.23.2", + "@algolia/logger-common": "4.23.2", + "@algolia/logger-console": "4.23.2", + "@algolia/recommend": "4.23.2", + "@algolia/requester-browser-xhr": "4.23.2", + "@algolia/requester-common": "4.23.2", + "@algolia/requester-node-http": "4.23.2", + "@algolia/transporter": "4.23.2" } }, "node_modules/algoliasearch-helper": { - "version": "3.16.1", - "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.16.1.tgz", - "integrity": "sha512-qxAHVjjmT7USVvrM8q6gZGaJlCK1fl4APfdAA7o8O6iXEc68G0xMNrzRkxoB/HmhhvyHnoteS/iMTiHiTcQQcg==", + "version": "3.17.0", + "resolved": "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.17.0.tgz", + "integrity": "sha512-R5422OiQjvjlK3VdpNQ/Qk7KsTIGeM5ACm8civGifOVWdRRV/3SgXuKmeNxe94Dz6fwj/IgpVmXbHutU4mHubg==", "dependencies": { "@algolia/events": "^4.0.1" }, @@ -4928,12 +4861,12 @@ "dev": true }, "node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", @@ -4941,7 +4874,7 @@ "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", - "raw-body": "2.5.1", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -6124,9 +6057,9 @@ "integrity": 
"sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "engines": { "node": ">= 0.6" } @@ -9620,16 +9553,16 @@ } }, "node_modules/express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -10005,9 +9938,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -10923,9 +10856,9 @@ } }, "node_modules/hast-util-raw": { - "version": "9.0.1", - "resolved": 
"https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.1.tgz", - "integrity": "sha512-5m1gmba658Q+lO5uqL5YNGQWeh1MYWZbZmWrM5lncdcuiXuo5E2HT/CIOp0rLF8ksfSwiCVJ3twlgVRyTGThGA==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-9.0.2.tgz", + "integrity": "sha512-PldBy71wO9Uq1kyaMch9AHIghtQvIwxBUkv823pKmkTM3oV1JxtsTNYdevMxvUHqcnOAuO65JKU2+0NOxc2ksA==", "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", @@ -11000,16 +10933,16 @@ } }, "node_modules/hast-util-to-jsx-runtime/node_modules/inline-style-parser": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.2.tgz", - "integrity": "sha512-EcKzdTHVe8wFVOGEYXiW9WmJXPjqi1T+234YpJr98RiFYKHV3cdy1+3mkTE+KHTHxFFLH51SfaGOoUdW+v7ViQ==" + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.3.tgz", + "integrity": "sha512-qlD8YNDqyTKTyuITrDOffsl6Tdhv+UC4hcdAVuQsK4IMQ99nSgd1MIA/Q+jQYoh9r3hVUXhYh7urSRmXPkW04g==" }, "node_modules/hast-util-to-jsx-runtime/node_modules/style-to-object": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.5.tgz", - "integrity": "sha512-rDRwHtoDD3UMMrmZ6BzOW0naTjMsVZLIjsGleSKS/0Oz+cgCfAPRspaqJuE8rDzpKha/nEvnM0IF4seEAZUTKQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.6.tgz", + "integrity": "sha512-khxq+Qm3xEyZfKd/y9L3oIWQimxuc4STrQKtQn8aSDRHb8mFgpukgX1hdzfrMEW6JCjyJ8p89x+IUMVnCBI1PA==", "dependencies": { - "inline-style-parser": "0.2.2" + "inline-style-parser": "0.2.3" } }, "node_modules/hast-util-to-parse5": { @@ -11688,9 +11621,9 @@ } }, "node_modules/ip": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", - "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==", + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/ip/-/ip-2.0.1.tgz", + "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==", "dev": true }, "node_modules/ipaddr.js": { @@ -12226,13 +12159,13 @@ } }, "node_modules/joi": { - "version": "17.11.0", - "resolved": "https://registry.npmjs.org/joi/-/joi-17.11.0.tgz", - "integrity": "sha512-NgB+lZLNoqISVy1rZocE9PZI36bL/77ie924Ri43yEvi9GUUMPeyVIr8KdFTMUlby1p0PBYMk9spIxEUQYqrJQ==", + "version": "17.12.2", + "resolved": "https://registry.npmjs.org/joi/-/joi-17.12.2.tgz", + "integrity": "sha512-RonXAIzCiHLc8ss3Ibuz45u28GOsWE1UpfDXLbN/9NKbL4tCJf8TWYVKsoYuuh+sAUt7fsSNpA+r2+TBA6Wjmw==", "dependencies": { - "@hapi/hoek": "^9.0.0", - "@hapi/topo": "^5.0.0", - "@sideway/address": "^4.1.3", + "@hapi/hoek": "^9.3.0", + "@hapi/topo": "^5.1.0", + "@sideway/address": "^4.1.5", "@sideway/formula": "^3.0.1", "@sideway/pinpoint": "^2.0.0" } @@ -12820,9 +12753,9 @@ } }, "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -12948,9 +12881,9 @@ } }, "node_modules/mdast-util-mdx-jsx": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.0.0.tgz", - "integrity": "sha512-XZuPPzQNBPAlaqsTTgRrcJnyFbSOBovSadFgbFu8SnuNgm+6Bdx1K+IWoitsmj6Lq6MNtI+ytOqwN70n//NaBA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz", + "integrity": 
"sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA==", "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", @@ -12989,9 +12922,9 @@ } }, "node_modules/mdast-util-phrasing": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.0.0.tgz", - "integrity": "sha512-xadSsJayQIucJ9n053dfQwVu1kuXg7jCTdYsMK8rqzKZh52nLfSH/k0sAxE0u+pj/zKZX+o5wB+ML5mRayOxFA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", "dependencies": { "@types/mdast": "^4.0.0", "unist-util-is": "^6.0.0" @@ -13002,9 +12935,9 @@ } }, "node_modules/mdast-util-to-hast": { - "version": "13.0.2", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.0.2.tgz", - "integrity": "sha512-U5I+500EOOw9e3ZrclN3Is3fRpw8c19SMyNZlZ2IS+7vLsNzb2Om11VpIVOR+/0137GhZsFEF6YiKD5+0Hr2Og==", + "version": "13.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.1.0.tgz", + "integrity": "sha512-/e2l/6+OdGp/FB+ctrJ9Avz71AN/GRH3oi/3KAx/kMnoUsD6q0woXlDT8lLEeViVKE7oZxE7RXzvO3T8kF2/sA==", "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", @@ -13013,7 +12946,8 @@ "micromark-util-sanitize-uri": "^2.0.0", "trim-lines": "^3.0.0", "unist-util-position": "^5.0.0", - "unist-util-visit": "^5.0.0" + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" }, "funding": { "type": "opencollective", @@ -13236,9 +13170,9 @@ } }, "node_modules/micromark-core-commonmark/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13307,9 +13241,9 @@ } }, "node_modules/micromark-extension-directive/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13356,9 +13290,9 @@ } }, "node_modules/micromark-extension-frontmatter/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13424,9 +13358,9 @@ } }, "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13496,9 +13430,9 @@ } }, "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13597,9 +13531,9 @@ } }, "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13678,9 +13612,9 @@ } }, "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13756,9 +13690,9 @@ } }, "node_modules/micromark-extension-mdx-expression/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13830,9 +13764,9 @@ } }, "node_modules/micromark-extension-mdx-jsx/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13915,9 +13849,9 @@ } }, "node_modules/micromark-extension-mdxjs-esm/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -13969,9 +13903,9 @@ } }, "node_modules/micromark-factory-destination/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14024,9 +13958,9 @@ } }, "node_modules/micromark-factory-label/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14083,9 +14017,9 @@ } }, "node_modules/micromark-factory-mdx-expression/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14191,9 +14125,9 @@ } }, "node_modules/micromark-factory-title/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14265,9 +14199,9 @@ } }, "node_modules/micromark-factory-whitespace/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14386,9 +14320,9 @@ } }, "node_modules/micromark-util-classify-character/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14493,9 +14427,9 @@ } }, "node_modules/micromark-util-decode-string/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14668,9 +14602,9 @@ } }, "node_modules/micromark-util-sanitize-uri/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -14787,9 +14721,9 @@ } }, "node_modules/micromark/node_modules/micromark-util-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.0.1.tgz", - "integrity": "sha512-3wgnrmEAJ4T+mGXAUfMvMAbxU9RDG43XmGce4j6CwPtVxB3vfwXSZ6KhFwDzZ3mZHhmPimMAXg71veiBGzeAZw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + 
"integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", "funding": [ { "type": "GitHub Sponsors", @@ -17476,9 +17410,9 @@ } }, "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", @@ -17780,9 +17714,9 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-json-view-lite": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.2.1.tgz", - "integrity": "sha512-Itc0g86fytOmKZoIoJyGgvNqohWSbh3NXIKNgH6W6FT9PC1ck4xas1tT3Rr/b3UlFXyA9Jjaw9QSXdZy2JwGMQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/react-json-view-lite/-/react-json-view-lite-1.3.0.tgz", + "integrity": "sha512-aN1biKC5v4DQkmQBlZjuMFR09MKZGMPtIg+cut8zEeg2HXd6gl2gRy0n4HMacHf0dznQgo0SVXN7eT8zV3hEuQ==", "engines": { "node": ">=14" }, @@ -18587,9 +18521,9 @@ } }, "node_modules/remark-mdx": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.0.0.tgz", - "integrity": "sha512-O7yfjuC6ra3NHPbRVxfflafAj3LTwx3b73aBvkEFU5z4PsD6FD4vrqJAkE5iNGLz71GdjXfgRqm3SQ0h0VuE7g==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.0.1.tgz", + "integrity": "sha512-3Pz3yPQ5Rht2pM5R+0J2MrGoBSrzf+tJG94N+t/ilfdh8YLyyKYtidAYwTveB20BoHAcwIopOUqhcmh2F7hGYA==", "dependencies": { "mdast-util-mdx": "^3.0.0", "micromark-extension-mdxjs": "^3.0.0" @@ -19088,9 +19022,9 @@ } }, "node_modules/remark-rehype": { - 
"version": "11.0.0", - "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.0.0.tgz", - "integrity": "sha512-vx8x2MDMcxuE4lBmQ46zYUDfcFMmvg80WYX+UNLeG6ixjdCCLcw1lrgAukwBTuOFsS78eoAedHGn9sNM0w7TPw==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.0.tgz", + "integrity": "sha512-z3tJrAs2kIs1AqIIy6pzHmAHlF1hWQ+OdY4/hv+Wxe35EhyLKcajL33iUEn3ScxtFox9nUvRufR/Zre8Q08H/g==", "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", @@ -21778,9 +21712,9 @@ "integrity": "sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==" }, "node_modules/utility-types": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.10.0.tgz", - "integrity": "sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==", + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/utility-types/-/utility-types-3.11.0.tgz", + "integrity": "sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==", "engines": { "node": ">= 4" } @@ -22018,9 +21952,9 @@ } }, "node_modules/webpack-dev-middleware": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.3.tgz", - "integrity": "sha512-hj5CYrY0bZLB+eTO+x/j67Pkrquiy7kWepMHmUMoPsmcUaeEnQJqFzHJOyxgWlq746/wUuA64p9ta34Kyb01pA==", + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", + "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", "dependencies": { "colorette": "^2.0.10", "memfs": "^3.4.3", diff --git a/docs/package.json b/docs/package.json index 35f38de59..87f3d3d71 100644 --- a/docs/package.json +++ b/docs/package.json @@ -16,11 +16,12 @@ "dependencies": { "@babel/preset-react": "^7.22.3", "@code-hike/mdx": "^0.9.0", - 
"@docusaurus/core": "3.0.1", - "@docusaurus/plugin-ideal-image": "^3.0.1", - "@docusaurus/preset-classic": "3.0.1", - "@docusaurus/theme-classic": "^3.0.1", - "@docusaurus/theme-search-algolia": "^3.0.1", + "@docusaurus/core": "^3.2.0", + "@docusaurus/plugin-google-gtag": "^3.2.0", + "@docusaurus/plugin-ideal-image": "^3.2.0", + "@docusaurus/preset-classic": "^3.2.0", + "@docusaurus/theme-classic": "^3.2.0", + "@docusaurus/theme-search-algolia": "^3.2.0", "@mdx-js/react": "^2.3.0", "@mendable/search": "^0.0.154", "@pbe/react-yandex-maps": "^1.2.4", @@ -47,7 +48,7 @@ "tailwindcss": "^3.3.2" }, "devDependencies": { - "@docusaurus/module-type-aliases": "2.4.1", + "@docusaurus/module-type-aliases": "^3.2.0", "css-loader": "^6.8.1", "docusaurus-node-polyfills": "^1.0.0", "node-sass": "^9.0.0", @@ -69,4 +70,4 @@ "engines": { "node": ">=16.14" } -} \ No newline at end of file +} diff --git a/docs/sidebars.js b/docs/sidebars.js index e238e1860..ddc9af202 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -2,13 +2,49 @@ module.exports = { docs: [ { type: "category", - label: "Getting Started", + label: " Getting Started", collapsed: false, items: [ "index", - "getting-started/installation", - "getting-started/hugging-face-spaces", - "getting-started/creating-flows", + "getting-started/cli", + "getting-started/basic-prompting", + "getting-started/document-qa", + "getting-started/blog-writer", + "getting-started/memory-chatbot", + "getting-started/rag-with-astradb", + ], + }, + { + type: "category", + label: " What's New", + collapsed: false, + items: [ + "whats-new/a-new-chapter-langflow", + "whats-new/migrating-to-one-point-zero", + ], + }, + + { + type: "category", + label: " Migration Guides", + collapsed: false, + items: [ + // "migration/flow-of-data", + "migration/inputs-and-outputs", + // "migration/supported-frameworks", + // "migration/sidebar-and-interaction-panel", + // "migration/new-categories-and-components", + "migration/text-and-record", + // 
"migration/custom-component", + "migration/compatibility", + // "migration/multiple-flows", + // "migration/component-status-and-data-passing", + // "migration/connecting-output-components", + // "migration/renaming-and-editing-components", + // "migration/passing-tweaks-and-inputs", + "migration/global-variables", + // "migration/experimental-components", + // "migration/state-management", ], }, { @@ -18,7 +54,6 @@ module.exports = { items: [ "guidelines/login", "guidelines/api", - "guidelines/async-api", "guidelines/components", "guidelines/features", "guidelines/collection", @@ -30,47 +65,42 @@ module.exports = { }, { type: "category", - label: "Component Reference", + label: "Step-by-Step Guides", + collapsed: false, + items: ["guides/langfuse_integration"], + }, + { + type: "category", + label: "Core Components", collapsed: false, items: [ - "components/agents", - "components/chains", - "components/custom", - "components/embeddings", - "components/llms", - "components/loaders", - "components/memories", - "components/prompts", - "components/retrievers", - "components/text-splitters", - "components/toolkits", - "components/tools", - "components/utilities", + "components/inputs", + "components/outputs", + "components/data", + "components/models", + "components/helpers", "components/vector-stores", - "components/wrappers", + "components/embeddings", ], }, { type: "category", - label: "Step-by-Step Guides", + label: "Extended Components", collapsed: false, items: [ - "guides/async-tasks", - "guides/loading_document", - "guides/chatprompttemplate_guide", - "guides/langfuse_integration", + "components/agents", + "components/chains", + "components/loaders", + "components/experimental", + "components/utilities", + "components/memories", + "components/model_specs", + "components/retrievers", + "components/text-splitters", + "components/toolkits", + "components/tools", ], }, - // { - // type: 'category', - // label: 'Components', - // collapsed: false, - // items: [ - // 
'components/agents', 'components/chains', 'components/loaders', 'components/embeddings', 'components/llms', - // 'components/memories', 'components/prompts','components/text-splitters', 'components/toolkits', 'components/tools', - // 'components/utilities', 'components/vector-stores', 'components/wrappers', - // ], - // }, { type: "category", label: "Examples", @@ -79,13 +109,10 @@ module.exports = { "examples/flow-runner", "examples/conversation-chain", "examples/buffer-memory", - "examples/midjourney-prompt-chain", "examples/csv-loader", "examples/searchapi-tool", "examples/serp-api-tool", - "examples/multiple-vectorstores", "examples/python-function", - "examples/how-upload-examples", ], }, { diff --git a/docs/src/theme/DownloadableJsonFile.js b/docs/src/theme/DownloadableJsonFile.js new file mode 100644 index 000000000..7b5466eac --- /dev/null +++ b/docs/src/theme/DownloadableJsonFile.js @@ -0,0 +1,29 @@ +const DownloadableJsonFile = ({ source, title }) => { + const handleDownload = (event) => { + event.preventDefault(); + fetch(source) + .then((response) => response.blob()) + .then((blob) => { + const url = window.URL.createObjectURL( + new Blob([blob], { type: "application/json" }) + ); + const link = document.createElement("a"); + link.href = url; + link.setAttribute("download", title); + document.body.appendChild(link); + link.click(); + link.parentNode.removeChild(link); + }) + .catch((error) => { + console.error("Error downloading file:", error); + }); + }; + + return ( + + {title} + + ); +}; + +export default DownloadableJsonFile; diff --git a/docs/static/data/AstraDB-RAG-Flows.json b/docs/static/data/AstraDB-RAG-Flows.json new file mode 100644 index 000000000..5706a0fbf --- /dev/null +++ b/docs/static/data/AstraDB-RAG-Flows.json @@ -0,0 +1,3403 @@ +{ + "id": "51e2b78a-199b-4054-9f32-e288eef6924c", + "data": { + "nodes": [ + { + "id": "ChatInput-yxMKE", + "type": "genericNode", + "position": { + "x": 1195.5276981160775, + "y": 209.421875 + }, + "data": { 
+ "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "what is a line" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + 
"dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Get chat inputs from the Interaction Panel.", + "icon": "ChatInput", + "base_classes": [ + "Text", + "str", + "object", + "Record" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-yxMKE" + }, + 
"selected": false, + "width": 384, + "height": 383 + }, + { + "id": "TextOutput-BDknO", + "type": "genericNode", + "position": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": [ + "Record", + "Text" + ], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a text output in the Interaction Panel.", + "icon": "type", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "Extracted Chunks", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-BDknO" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-ZlOk1", + "type": "genericNode", + "position": { + "x": 1183.667250865064, + "y": 687.3171828430261 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": 
True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 
1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + 
"load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [ + "all" + ], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": 
false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": 
"openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": 
false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Generate embeddings using OpenAI models.", + "base_classes": [ + "Embeddings" + ], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + 
"openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": [ + "Embeddings" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-ZlOk1" + }, + "selected": false, + "width": 384, + "height": 383, + "dragging": false + }, + { + "id": "OpenAIModel-EjXlN", + "type": "genericNode", + "position": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model 
Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + 
"max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": 
"CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-EjXlN" + }, + "selected": true, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "dragging": false + }, + { + "id": "Prompt-xeI6K", + "type": "genericNode", + "position": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: 
\"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "context": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "context", + "display_name": "context", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "question": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "question", + "display_name": "question", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a 
prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "Text", + "str" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "context", + "question" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-xeI6K", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 477, + "positionAbsolute": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "dragging": false + }, + { + "id": "ChatOutput-Q39I8", + "type": "genericNode", + "position": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": 
true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + 
"multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "object", + "Text", + "Record", + "str" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-Q39I8" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "dragging": false + }, + { + "id": "File-t0a6a", + "type": "genericNode", + "position": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "data": { + "type": "File", + "node": { + "template": { + "path": { + "type": "file", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [ + ".txt", + ".md", + ".mdx", + ".csv", + ".json", + ".yaml", + ".yml", + ".xml", + ".html", + ".htm", + ".pdf", + ".docx" + ], + "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow 
conversation.pdf", + "password": false, + "name": "path", + "display_name": "Path", + "advanced": false, + "dynamic": false, + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "silent_errors": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "silent_errors", + "display_name": "Silent Errors", + "advanced": true, + "dynamic": false, + "info": "If true, errors will not raise an exception.", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "A generic file loader.", + "icon": "file-text", + "base_classes": [ + "Record" + ], + "display_name": "File", + "documentation": "", + "custom_fields": { + "path": null, + "silent_errors": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "File-t0a6a" + }, + "selected": false, + "width": 384, + "height": 281, + "positionAbsolute": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "dragging": false + }, + { + "id": "RecursiveCharacterTextSplitter-tR9QM", + "type": "genericNode", + "position": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "data": { + "type": "RecursiveCharacterTextSplitter", + "node": { + "template": { + "inputs": { + "type": "Document", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "inputs", + "display_name": "Input", + "advanced": false, + "input_types": [ + "Document", + "Record" + ], + "dynamic": false, + "info": "The texts to split.", + "load_from_db": false, + "title_case": false + }, + "chunk_overlap": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 200, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "The amount of overlap between chunks.", + "load_from_db": false, + "title_case": false + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "The maximum length of each chunk.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n 
\"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = 
build_loader_repr_from_records(records)\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "separators": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "separators", + "display_name": "Separators", + "advanced": false, + "dynamic": false, + "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": [ + "" + ] + }, + "_type": "CustomComponent" + }, + "description": "Split text into chunks of a specified length.", + "base_classes": [ + "Record" + ], + "display_name": "Recursive Character Text Splitter", + "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", + "custom_fields": { + "inputs": null, + "separators": null, + "chunk_size": null, + "chunk_overlap": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "RecursiveCharacterTextSplitter-tR9QM" + }, + "selected": false, + "width": 384, + "height": 501, + "positionAbsolute": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "dragging": false + }, + { + "id": "AstraDBSearch-41nRz", + "type": "genericNode", + "position": { + "x": 1723.976434815103, + "y": 277.03317407245913 + }, + "data": { + "type": "AstraDBSearch", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to 
use", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input Value", + "advanced": false, + "dynamic": false, + "info": "Input value to search", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + "dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": 
"bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n 
\"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": 
\"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if 
\"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": 
false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "number_of_results": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 4, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "number_of_results", + "display_name": "Number of Results", + "advanced": true, + "dynamic": false, + "info": "Number of results to return.", + "load_from_db": false, + "title_case": false + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + 
"info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "search_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Similarity", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Similarity", + "MMR" + ], + "name": "search_type", + "display_name": "Search Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Sync", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Sync", + "Async", + "Off" + ], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Searches an existing Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": [ + "Record" + ], + "display_name": "Astra DB Search", + "documentation": "", + "custom_fields": { + "embedding": null, + "collection_name": null, + "input_value": null, + "token": null, + "api_endpoint": null, + "search_type": null, + "number_of_results": null, + "namespace": 
null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "token", + "api_endpoint", + "collection_name", + "input_value", + "embedding" + ], + "beta": false + }, + "id": "AstraDBSearch-41nRz" + }, + "selected": false, + "width": 384, + "height": 713, + "dragging": false, + "positionAbsolute": { + "x": 1723.976434815103, + "y": 277.03317407245913 + } + }, + { + "id": "AstraDB-eUCSS", + "type": "genericNode", + "position": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "data": { + "type": "AstraDB", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to use", + "load_from_db": false, + "title_case": false + }, + "inputs": { + "type": "Record", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Inputs", + "advanced": false, + "dynamic": false, + "info": "Optional list of records to be processed and stored in the vector store.", + "load_from_db": false, + "title_case": false + }, + "api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + 
"dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + 
}, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk 
insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n 
metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": 
false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + 
"multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Async", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Sync", + "Async", + "Off" + ], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": 
true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Builds or loads an Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": [ + "VectorStore" + ], + "display_name": "Astra DB", + "documentation": "", + "custom_fields": { + "embedding": null, + "token": null, + "api_endpoint": null, + "collection_name": null, + "inputs": null, + "namespace": null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null + }, + "output_types": [ + "VectorStore" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "token", + "api_endpoint", + "collection_name", + "inputs", + "embedding" + ], + "beta": false + }, + "id": "AstraDB-eUCSS" + }, + "selected": false, + "width": 384, + "height": 573, + "positionAbsolute": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-9TPjc", + "type": "genericNode", + "position": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "chunk_size": { + "type": "int", + 
"required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n 
\"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n 
openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + 
"name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [ + "all" + ], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", 
+ "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": 
"bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Generate embeddings using OpenAI models.", + "base_classes": [ + "Embeddings" + ], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": [ + "Embeddings" + ], + "field_formatters": {}, 
+ "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-9TPjc" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "TextOutput-BDknO", + "target": "Prompt-xeI6K", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}", + "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-TextOutput-BDknO{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}-Prompt-xeI6K{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "context", + "id": "Prompt-xeI6K", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "TextOutput", + "id": "TextOutput-BDknO" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "ChatInput-yxMKE", + "target": "Prompt-xeI6K", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}", + "targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-Prompt-xeI6K{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "question", + "id": "Prompt-xeI6K", + "inputTypes": [ + "Document", + 
"BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Text", + "str", + "object", + "Record" + ], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "Prompt-xeI6K", + "target": "OpenAIModel-EjXlN", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-Prompt-xeI6K{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}-OpenAIModel-EjXlN{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-EjXlN", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "Prompt", + "id": "Prompt-xeI6K" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIModel-EjXlN", + "target": "ChatOutput-Q39I8", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-OpenAIModel-EjXlN{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}-ChatOutput-Q39I8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-Q39I8", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "OpenAIModel", + "id": 
"OpenAIModel-EjXlN" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "File-t0a6a", + "target": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}", + "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}", + "id": "reactflow__edge-File-t0a6a{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}-RecursiveCharacterTextSplitter-tR9QM{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "RecursiveCharacterTextSplitter-tR9QM", + "inputTypes": [ + "Document", + "Record" + ], + "type": "Document" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "File", + "id": "File-t0a6a" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIEmbeddings-ZlOk1", + "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDBSearch-41nRz", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": [ + "Embeddings" + ], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-ZlOk1" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-OpenAIEmbeddings-ZlOk1{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}-AstraDBSearch-41nRz{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}" + }, + { + "source": "ChatInput-yxMKE", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "AstraDBSearch-41nRz", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Text", + "str", + "object", + "Record" + ], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-AstraDBSearch-41nRz{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}", + "target": "AstraDB-eUCSS", + "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Record" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "RecursiveCharacterTextSplitter", + "id": "RecursiveCharacterTextSplitter-tR9QM" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}-AstraDB-eUCSS{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}", + "selected": false + }, + { + "source": "OpenAIEmbeddings-9TPjc", + "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}", + "target": "AstraDB-eUCSS", + "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": [ + "Embeddings" + ], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-9TPjc" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}-AstraDB-eUCSS{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "selected": false + }, + { + "source": "AstraDBSearch-41nRz", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}", + "target": "TextOutput-BDknO", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-BDknO", + "inputTypes": [ + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "AstraDBSearch", + "id": "AstraDBSearch-41nRz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-AstraDBSearch-41nRz{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}-TextOutput-BDknO{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": -259.6782520315529, + "y": 90.3428735006047, + "zoom": 0.2687057134854984 + } + }, + "description": "Visit https://pre-release.langflow.org/guides/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", + "name": "Vector Store RAG", + "last_tested_version": "1.0.0a0", + "is_component": false +} \ No newline at end of file diff --git a/docs/static/img/add-new-variable.png b/docs/static/img/add-new-variable.png new file mode 100644 index 000000000..178cab67c Binary files /dev/null and b/docs/static/img/add-new-variable.png differ diff --git a/docs/static/img/astra-configure-deployment.png b/docs/static/img/astra-configure-deployment.png new file mode 100644 index 000000000..803f150d1 Binary files /dev/null and b/docs/static/img/astra-configure-deployment.png differ diff --git a/docs/static/img/astra-create-database.png b/docs/static/img/astra-create-database.png new file mode 100644 index 000000000..96e7f15c8 Binary files /dev/null and b/docs/static/img/astra-create-database.png differ diff --git a/docs/static/img/astra-generate-token.png b/docs/static/img/astra-generate-token.png new file mode 100644 index 000000000..b41310491 Binary files /dev/null and b/docs/static/img/astra-generate-token.png differ diff --git 
a/docs/static/img/astra-ingestion-fields.png b/docs/static/img/astra-ingestion-fields.png new file mode 100644 index 000000000..e19c65e19 Binary files /dev/null and b/docs/static/img/astra-ingestion-fields.png differ diff --git a/docs/static/img/astra-ingestion-flow-dark.png b/docs/static/img/astra-ingestion-flow-dark.png new file mode 100644 index 000000000..86b77806b Binary files /dev/null and b/docs/static/img/astra-ingestion-flow-dark.png differ diff --git a/docs/static/img/astra-ingestion-flow.png b/docs/static/img/astra-ingestion-flow.png new file mode 100644 index 000000000..8ee2567bc Binary files /dev/null and b/docs/static/img/astra-ingestion-flow.png differ diff --git a/docs/static/img/astra-ingestion-run.png b/docs/static/img/astra-ingestion-run.png new file mode 100644 index 000000000..2476e54f9 Binary files /dev/null and b/docs/static/img/astra-ingestion-run.png differ diff --git a/docs/static/img/astra-rag-flow-dark.png b/docs/static/img/astra-rag-flow-dark.png new file mode 100644 index 000000000..60482e09a Binary files /dev/null and b/docs/static/img/astra-rag-flow-dark.png differ diff --git a/docs/static/img/astra-rag-flow-interaction-panel-interaction.png b/docs/static/img/astra-rag-flow-interaction-panel-interaction.png new file mode 100644 index 000000000..4f87704be Binary files /dev/null and b/docs/static/img/astra-rag-flow-interaction-panel-interaction.png differ diff --git a/docs/static/img/astra-rag-flow-interaction-panel.png b/docs/static/img/astra-rag-flow-interaction-panel.png new file mode 100644 index 000000000..6e106b316 Binary files /dev/null and b/docs/static/img/astra-rag-flow-interaction-panel.png differ diff --git a/docs/static/img/astra-rag-flow-run.png b/docs/static/img/astra-rag-flow-run.png new file mode 100644 index 000000000..8a1c849eb Binary files /dev/null and b/docs/static/img/astra-rag-flow-run.png differ diff --git a/docs/static/img/astra-rag-flow.png b/docs/static/img/astra-rag-flow.png new file mode 100644 index 
000000000..fab8e2a44 Binary files /dev/null and b/docs/static/img/astra-rag-flow.png differ diff --git a/docs/static/img/chat-input-expanded.png b/docs/static/img/chat-input-expanded.png new file mode 100644 index 000000000..befe5afbd Binary files /dev/null and b/docs/static/img/chat-input-expanded.png differ diff --git a/docs/static/img/chat-input.png b/docs/static/img/chat-input.png new file mode 100644 index 000000000..29605c1b1 Binary files /dev/null and b/docs/static/img/chat-input.png differ diff --git a/docs/static/img/create-variable-window.png b/docs/static/img/create-variable-window.png new file mode 100644 index 000000000..75c8842b5 Binary files /dev/null and b/docs/static/img/create-variable-window.png differ diff --git a/docs/static/img/drag-and-drop-canvas.png b/docs/static/img/drag-and-drop-canvas.png new file mode 100644 index 000000000..e9a0083a7 Binary files /dev/null and b/docs/static/img/drag-and-drop-canvas.png differ diff --git a/docs/static/img/drag-and-drop-flow.png b/docs/static/img/drag-and-drop-flow.png new file mode 100644 index 000000000..3351498b2 Binary files /dev/null and b/docs/static/img/drag-and-drop-flow.png differ diff --git a/docs/static/img/duplicate-space.png b/docs/static/img/duplicate-space.png new file mode 100644 index 000000000..fd9cf865e Binary files /dev/null and b/docs/static/img/duplicate-space.png differ diff --git a/docs/static/img/interaction-panel-text-input.png b/docs/static/img/interaction-panel-text-input.png new file mode 100644 index 000000000..77994c924 Binary files /dev/null and b/docs/static/img/interaction-panel-text-input.png differ diff --git a/docs/static/img/interaction-panel-with-chat-input.png b/docs/static/img/interaction-panel-with-chat-input.png new file mode 100644 index 000000000..c5f7c7998 Binary files /dev/null and b/docs/static/img/interaction-panel-with-chat-input.png differ diff --git a/docs/static/img/ollama-gv.png b/docs/static/img/ollama-gv.png new file mode 100644 index 
000000000..0ee7540ee Binary files /dev/null and b/docs/static/img/ollama-gv.png differ diff --git a/docs/static/img/prompt-with-template.png b/docs/static/img/prompt-with-template.png new file mode 100644 index 000000000..0312b899f Binary files /dev/null and b/docs/static/img/prompt-with-template.png differ diff --git a/docs/static/img/prompt.png b/docs/static/img/prompt.png new file mode 100644 index 000000000..6d4260bed Binary files /dev/null and b/docs/static/img/prompt.png differ diff --git a/docs/static/img/text-input-expanded.png b/docs/static/img/text-input-expanded.png new file mode 100644 index 000000000..fe73e496c Binary files /dev/null and b/docs/static/img/text-input-expanded.png differ diff --git a/docs/static/img/text-input.png b/docs/static/img/text-input.png new file mode 100644 index 000000000..e8f8da248 Binary files /dev/null and b/docs/static/img/text-input.png differ diff --git a/docs/static/json_files/SearchApi_Tool.json b/docs/static/json_files/SearchApi_Tool.json index 304f191f1..8179d7c07 100644 --- a/docs/static/json_files/SearchApi_Tool.json +++ b/docs/static/json_files/SearchApi_Tool.json @@ -1 +1,1747 @@ 
-{"id":"7fc56f82-3493-4742-9c85-82b144127aff","data":{"nodes":[{"id":"ChatOpenAI-4Mfuz","type":"genericNode","position":{"x":-2243.8068684913856,"y":-2026.350019258601},"data":{"type":"ChatOpenAI","node":{"template":{"callbacks":{"type":"langchain_core.callbacks.base.BaseCallbackHandler","required":false,"placeholder":"","list":true,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":""},"async_client":{"type":"Any","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"async_client","advanced":false,"dynamic":false,"info":""},"cache":{"type":"bool","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"cache","advanced":false,"dynamic":false,"info":""},"client":{"type":"Any","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"client","advanced":false,"dynamic":false,"info":""},"default_headers":{"type":"dict","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"default_headers","advanced":false,"dynamic":false,"info":""},"default_query":{"type":"dict","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"default_query","advanced":false,"dynamic":false,"info":""},"http_client":{"type":"Any","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"http_client","advanced":false,"dynamic":false,"info":""},"max_retries":{"type":"int","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"value":2,"fileTypes":[],"password":false,"name":"max_retries","advanced":false,"dynamic":false,"info":""},"max_tokens":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileType
s":[],"password":true,"name":"max_tokens","advanced":false,"dynamic":false,"info":"","value":""},"metadata":{"type":"dict","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":""},"model_kwargs":{"type":"dict","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"model_kwargs","advanced":true,"dynamic":false,"info":""},"model_name":{"type":"str","required":false,"placeholder":"","list":true,"show":true,"multiline":false,"value":"gpt-3.5-turbo-16k","fileTypes":[],"password":false,"options":["gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-32k","gpt-3.5-turbo","gpt-3.5-turbo-16k"],"name":"model_name","advanced":false,"dynamic":false,"info":""},"n":{"type":"int","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"value":1,"fileTypes":[],"password":false,"name":"n","advanced":false,"dynamic":false,"info":""},"name":{"type":"str","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"name","advanced":false,"dynamic":false,"info":""},"openai_api_base":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"openai_api_base","display_name":"OpenAI API Base","advanced":false,"dynamic":false,"info":"\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n"},"openai_api_key":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":"","fileTypes":[],"password":true,"name":"openai_api_key","display_name":"OpenAI API Key","advanced":false,"dynamic":false,"info":""},"openai_organization":{"type":"str","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"openai_organization","display_name":"OpenAI Organization","advanced":false,"dynamic":false,"info":""},"openai_proxy":{"type":"str","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"openai_proxy","display_name":"OpenAI Proxy","advanced":false,"dynamic":false,"info":""},"request_timeout":{"type":"float","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"request_timeout","advanced":false,"dynamic":false,"info":"","rangeSpec":{"min":-1,"max":1,"step":0.1}},"streaming":{"type":"bool","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"value":false,"fileTypes":[],"password":false,"name":"streaming","advanced":false,"dynamic":false,"info":""},"tags":{"type":"str","required":false,"placeholder":"","list":true,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"tags","advanced":false,"dynamic":false,"info":""},"temperature":{"type":"float","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":"0.5","fileTypes":[],"password":false,"name":"temperature","advanced":false,"dynamic":false,"info":"","rangeSpec":{"min":-1,"max":1,"step":0.1}},"tiktoken_model_name":{"type":"str","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"tiktoken_model_name","advanced":false,"dynamic":false,"info":""},"verbo
se":{"type":"bool","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"value":false,"fileTypes":[],"password":false,"name":"verbose","advanced":false,"dynamic":false,"info":""},"_type":"ChatOpenAI"},"description":"[*Deprecated*] `OpenAI` Chat large language models API.","base_classes":["BaseLanguageModel","BaseChatModel","ChatOpenAI","BaseLLM"],"display_name":"ChatOpenAI","documentation":"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai","custom_fields":{},"output_types":[],"field_formatters":{},"beta":false},"id":"ChatOpenAI-4Mfuz"},"selected":false,"width":384,"height":649,"positionAbsolute":{"x":-2243.8068684913856,"y":-2026.350019258601},"dragging":false},{"id":"CharacterTextSplitter-WVMFU","type":"genericNode","position":{"x":-2661.4749778477553,"y":-1608.9437055023366},"data":{"type":"CharacterTextSplitter","node":{"template":{"documents":{"type":"Document","required":true,"placeholder":"","list":true,"show":true,"multiline":false,"value":"","fileTypes":[],"file_path":"","password":false,"name":"documents","advanced":false,"dynamic":false,"info":""},"chunk_overlap":{"type":"int","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":200,"fileTypes":[],"file_path":"","password":false,"name":"chunk_overlap","display_name":"Chunk Overlap","advanced":false,"dynamic":false,"info":""},"chunk_size":{"type":"int","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"2000","fileTypes":[],"file_path":"","password":false,"name":"chunk_size","display_name":"Chunk Size","advanced":false,"dynamic":false,"info":""},"separator":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"\"","fileTypes":[],"file_path":"","password":false,"name":"separator","display_name":"Separator","advanced":false,"dynamic":false,"info":""},"_type":"CharacterTextSplitter"},"description":"Splitting text that looks at 
characters.","base_classes":["Document"],"display_name":"CharacterTextSplitter","documentation":"https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter","custom_fields":{},"output_types":["Document"],"field_formatters":{},"beta":false},"id":"CharacterTextSplitter-WVMFU"},"selected":false,"width":384,"height":501,"positionAbsolute":{"x":-2661.4749778477553,"y":-1608.9437055023366},"dragging":false},{"id":"Chroma-OtYDg","type":"genericNode","position":{"x":-2194.2051907050227,"y":-1370.1637632208287},"data":{"type":"Chroma","node":{"template":{"documents":{"type":"Document","required":false,"placeholder":"","list":true,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"documents","display_name":"Documents","advanced":false,"dynamic":false,"info":""},"embedding":{"type":"Embeddings","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"embedding","display_name":"Embedding","advanced":false,"dynamic":false,"info":""},"chroma_server_cors_allow_origins":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"chroma_server_cors_allow_origins","display_name":"Server CORS Allow Origins","advanced":true,"dynamic":false,"info":""},"chroma_server_grpc_port":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"chroma_server_grpc_port","display_name":"Server gRPC Port","advanced":true,"dynamic":false,"info":""},"chroma_server_host":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"chroma_server_host","display_name":"Server 
Host","advanced":true,"dynamic":false,"info":""},"chroma_server_port":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"chroma_server_port","display_name":"Server Port","advanced":true,"dynamic":false,"info":""},"chroma_server_ssl_enabled":{"type":"bool","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":false,"fileTypes":[],"file_path":"","password":false,"name":"chroma_server_ssl_enabled","display_name":"Server SSL Enabled","advanced":true,"dynamic":false,"info":""},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from typing import List, Optional, Union\n\nimport chromadb # type: ignore\nfrom langchain.embeddings.base import Embeddings\nfrom langchain.schema import BaseRetriever, Document\nfrom langchain.vectorstores import Chroma\nfrom langchain.vectorstores.base import VectorStore\n\nfrom langflow import CustomComponent\n\n\nclass ChromaComponent(CustomComponent):\n \"\"\"\n A custom component for implementing a Vector Store using Chroma.\n \"\"\"\n\n display_name: str = \"Chroma\"\n description: str = \"Implementation of Vector Store using Chroma\"\n documentation = \"https://python.langchain.com/docs/integrations/vectorstores/chroma\"\n beta: bool = True\n\n def build_config(self):\n \"\"\"\n Builds the configuration for the component.\n\n Returns:\n - dict: A dictionary containing the configuration options for the component.\n \"\"\"\n return {\n \"collection_name\": {\"display_name\": \"Collection Name\", \"value\": \"langflow\"},\n \"persist\": {\"display_name\": \"Persist\"},\n \"persist_directory\": {\"display_name\": \"Persist Directory\"},\n \"code\": {\"show\": False, \"display_name\": \"Code\"},\n \"documents\": {\"display_name\": \"Documents\", \"is_list\": True},\n \"embedding\": {\"display_name\": \"Embedding\"},\n \"chroma_server_cors_allow_origins\": {\n 
\"display_name\": \"Server CORS Allow Origins\",\n \"advanced\": True,\n },\n \"chroma_server_host\": {\"display_name\": \"Server Host\", \"advanced\": True},\n \"chroma_server_port\": {\"display_name\": \"Server Port\", \"advanced\": True},\n \"chroma_server_grpc_port\": {\n \"display_name\": \"Server gRPC Port\",\n \"advanced\": True,\n },\n \"chroma_server_ssl_enabled\": {\n \"display_name\": \"Server SSL Enabled\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n collection_name: str,\n persist: bool,\n embedding: Embeddings,\n chroma_server_ssl_enabled: bool,\n persist_directory: Optional[str] = None,\n documents: Optional[List[Document]] = None,\n chroma_server_cors_allow_origins: Optional[str] = None,\n chroma_server_host: Optional[str] = None,\n chroma_server_port: Optional[int] = None,\n chroma_server_grpc_port: Optional[int] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n \"\"\"\n Builds the Vector Store or BaseRetriever object.\n\n Args:\n - collection_name (str): The name of the collection.\n - persist_directory (Optional[str]): The directory to persist the Vector Store to.\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\n - persist (bool): Whether to persist the Vector Store or not.\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\n - documents (Optional[Document]): The documents to use for the Vector Store.\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\n - chroma_server_host (Optional[str]): The host for the Chroma server.\n - chroma_server_port (Optional[int]): The port for the Chroma server.\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\n\n Returns:\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\n \"\"\"\n\n # Chroma settings\n chroma_settings = None\n\n if chroma_server_host is not None:\n chroma_settings = chromadb.config.Settings(\n 
chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,\n chroma_server_host=chroma_server_host,\n chroma_server_port=chroma_server_port or None,\n chroma_server_grpc_port=chroma_server_grpc_port or None,\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\n )\n\n # If documents, then we need to create a Chroma instance using .from_documents\n if documents is not None and embedding is not None:\n if len(documents) == 0:\n raise ValueError(\"If documents are provided, there must be at least one document.\")\n return Chroma.from_documents(\n documents=documents, # type: ignore\n persist_directory=persist_directory if persist else None,\n collection_name=collection_name,\n embedding=embedding,\n client_settings=chroma_settings,\n )\n\n return Chroma(persist_directory=persist_directory, client_settings=chroma_settings)\n","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":false,"dynamic":true,"info":""},"collection_name":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"video","fileTypes":[],"file_path":"","password":false,"name":"collection_name","display_name":"Collection Name","advanced":false,"dynamic":false,"info":""},"persist":{"type":"bool","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":false,"fileTypes":[],"file_path":"","password":false,"name":"persist","display_name":"Persist","advanced":false,"dynamic":false,"info":""},"persist_directory":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"persist_directory","display_name":"Persist Directory","advanced":false,"dynamic":false,"info":""},"_type":"CustomComponent"},"description":"Implementation of Vector Store using 
Chroma","base_classes":["VectorStore","BaseRetriever"],"display_name":"Chroma","documentation":"https://python.langchain.com/docs/integrations/vectorstores/chroma","custom_fields":{"chroma_server_cors_allow_origins":null,"chroma_server_grpc_port":null,"chroma_server_host":null,"chroma_server_port":null,"chroma_server_ssl_enabled":null,"collection_name":null,"documents":null,"embedding":null,"persist":null,"persist_directory":null},"output_types":["Chroma"],"field_formatters":{},"beta":true},"id":"Chroma-OtYDg"},"selected":false,"width":384,"height":625,"positionAbsolute":{"x":-2194.2051907050227,"y":-1370.1637632208287},"dragging":false},{"id":"OpenAIEmbeddings-9yqtI","type":"genericNode","position":{"x":-2653.011009324626,"y":-1103.8414515074774},"data":{"type":"OpenAIEmbeddings","node":{"template":{"allowed_special":{"type":"str","required":false,"placeholder":"","list":true,"show":true,"multiline":false,"value":[],"fileTypes":[],"password":false,"name":"allowed_special","advanced":true,"dynamic":false,"info":""},"async_client":{"type":"Any","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"async_client","advanced":true,"dynamic":false,"info":""},"chunk_size":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":1000,"fileTypes":[],"password":false,"name":"chunk_size","advanced":true,"dynamic":false,"info":""},"client":{"type":"Any","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"client","advanced":true,"dynamic":false,"info":""},"default_headers":{"type":"dict","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"default_headers","advanced":true,"dynamic":false,"info":""},"default_query":{"type":"dict","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"defau
lt_query","advanced":true,"dynamic":false,"info":""},"deployment":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":"text-embedding-ada-002","fileTypes":[],"password":false,"name":"deployment","advanced":true,"dynamic":false,"info":""},"disallowed_special":{"type":"str","required":false,"placeholder":"","list":true,"show":true,"multiline":false,"value":"all","fileTypes":[],"password":false,"name":"disallowed_special","advanced":true,"dynamic":false,"info":""},"embedding_ctx_length":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":8191,"fileTypes":[],"password":false,"name":"embedding_ctx_length","advanced":true,"dynamic":false,"info":""},"headers":{"type":"Any","required":false,"placeholder":"","list":false,"show":false,"multiline":true,"value":"{\"Authorization\": \"Bearer \"}","fileTypes":[],"password":false,"name":"headers","advanced":true,"dynamic":false,"info":""},"http_client":{"type":"Any","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"http_client","advanced":true,"dynamic":false,"info":""},"max_retries":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":2,"fileTypes":[],"password":false,"name":"max_retries","advanced":true,"dynamic":false,"info":""},"model":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":"text-embedding-ada-002","fileTypes":[],"password":false,"name":"model","advanced":true,"dynamic":false,"info":""},"model_kwargs":{"type":"dict","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"model_kwargs","advanced":true,"dynamic":false,"info":""},"openai_api_base":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":true,"name":"openai_api_base","display_name":"
OpenAI API Base","advanced":true,"dynamic":false,"info":"","value":""},"openai_api_key":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":"","fileTypes":[],"password":true,"name":"openai_api_key","display_name":"OpenAI API Key","advanced":false,"dynamic":false,"info":""},"openai_api_type":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":true,"name":"openai_api_type","display_name":"OpenAI API Type","advanced":true,"dynamic":false,"info":"","value":""},"openai_api_version":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":true,"name":"openai_api_version","display_name":"OpenAI API Version","advanced":true,"dynamic":false,"info":"","value":""},"openai_organization":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"openai_organization","display_name":"OpenAI Organization","advanced":true,"dynamic":false,"info":""},"openai_proxy":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"openai_proxy","display_name":"OpenAI 
Proxy","advanced":true,"dynamic":false,"info":""},"request_timeout":{"type":"float","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"request_timeout","advanced":true,"dynamic":false,"info":"","rangeSpec":{"min":-1,"max":1,"step":0.1}},"retry_max_seconds":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":20,"fileTypes":[],"password":false,"name":"retry_max_seconds","advanced":true,"dynamic":false,"info":""},"retry_min_seconds":{"type":"int","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":4,"fileTypes":[],"password":false,"name":"retry_min_seconds","advanced":true,"dynamic":false,"info":""},"show_progress_bar":{"type":"bool","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":false,"fileTypes":[],"password":false,"name":"show_progress_bar","advanced":true,"dynamic":false,"info":""},"skip_empty":{"type":"bool","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":false,"fileTypes":[],"password":false,"name":"skip_empty","advanced":true,"dynamic":false,"info":""},"tiktoken_enabled":{"type":"bool","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":"","fileTypes":[],"password":true,"name":"tiktoken_enabled","advanced":false,"dynamic":false,"info":""},"tiktoken_model_name":{"type":"str","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":true,"name":"tiktoken_model_name","advanced":false,"dynamic":false,"info":"","value":""},"_type":"OpenAIEmbeddings"},"description":"[*Deprecated*] OpenAI embedding 
models.","base_classes":["Embeddings","OpenAIEmbeddings"],"display_name":"OpenAIEmbeddings","documentation":"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai","custom_fields":{},"output_types":[],"field_formatters":{},"beta":false},"id":"OpenAIEmbeddings-9yqtI"},"selected":false,"width":384,"height":443,"positionAbsolute":{"x":-2653.011009324626,"y":-1103.8414515074774},"dragging":false},{"id":"RetrievalQA-DpylI","type":"genericNode","position":{"x":-1757.239471200792,"y":-1258.2132589352987},"data":{"type":"RetrievalQA","node":{"template":{"callbacks":{"type":"langchain_core.callbacks.base.BaseCallbackHandler","required":false,"placeholder":"","list":true,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":""},"combine_documents_chain":{"type":"BaseCombineDocumentsChain","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"combine_documents_chain","advanced":false,"dynamic":false,"info":""},"memory":{"type":"BaseMemory","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"memory","advanced":false,"dynamic":false,"info":""},"retriever":{"type":"BaseRetriever","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"password":false,"name":"retriever","advanced":false,"dynamic":false,"info":""},"input_key":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"query","fileTypes":[],"password":false,"name":"input_key","advanced":true,"dynamic":false,"info":""},"metadata":{"type":"dict","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":""},"name":{"type":"str","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fi
leTypes":[],"password":false,"name":"name","advanced":false,"dynamic":false,"info":""},"output_key":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"result","fileTypes":[],"password":false,"name":"output_key","advanced":true,"dynamic":false,"info":""},"return_source_documents":{"type":"bool","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"value":true,"fileTypes":[],"password":false,"name":"return_source_documents","advanced":true,"dynamic":false,"info":""},"tags":{"type":"str","required":false,"placeholder":"","list":true,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"tags","advanced":false,"dynamic":false,"info":""},"verbose":{"type":"bool","required":false,"placeholder":"","list":false,"show":false,"multiline":false,"fileTypes":[],"password":false,"name":"verbose","advanced":true,"dynamic":false,"info":""},"_type":"RetrievalQA"},"description":"Chain for question-answering against an 
index.","base_classes":["Chain","RetrievalQA","BaseRetrievalQA","Callable"],"display_name":"RetrievalQA","documentation":"https://python.langchain.com/docs/modules/chains/popular/vector_db_qa","custom_fields":{},"output_types":[],"field_formatters":{},"beta":false},"id":"RetrievalQA-DpylI"},"selected":false,"width":384,"height":339,"positionAbsolute":{"x":-1757.239471200792,"y":-1258.2132589352987},"dragging":true},{"id":"CombineDocsChain-afKOq","type":"genericNode","position":{"x":-1775.9040057312934,"y":-1644.0423777157919},"data":{"type":"CombineDocsChain","node":{"template":{"llm":{"type":"BaseLanguageModel","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"value":"","fileTypes":[],"file_path":"","password":false,"name":"llm","display_name":"LLM","advanced":false,"dynamic":false,"info":""},"chain_type":{"type":"str","required":true,"placeholder":"","list":true,"show":true,"multiline":false,"value":"refine","fileTypes":[],"file_path":"","password":false,"options":["stuff","map_reduce","map_rerank","refine"],"name":"chain_type","advanced":false,"dynamic":false,"info":""},"_type":"load_qa_chain"},"description":"Load question answering chain.","base_classes":["BaseCombineDocumentsChain","Callable"],"display_name":"CombineDocsChain","documentation":"","custom_fields":{},"output_types":[],"field_formatters":{},"beta":false},"id":"CombineDocsChain-afKOq"},"selected":false,"width":384,"height":333,"dragging":false,"positionAbsolute":{"x":-1775.9040057312934,"y":-1644.0423777157919}},{"id":"SearchApi-kZmEj","type":"genericNode","position":{"x":-3074.364183247263,"y":-1782.5742787937606},"data":{"type":"SearchApi","node":{"template":{"api_key":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":true,"name":"api_key","display_name":"API Key","advanced":false,"dynamic":false,"info":"The API key to use 
SearchApi.","value":""},"code":{"type":"code","required":true,"placeholder":"","list":false,"show":true,"multiline":true,"value":"from langflow import CustomComponent\nfrom langchain.schema import Document\nfrom langflow.services.database.models.base import orjson_dumps\nfrom langchain_community.utilities.searchapi import SearchApiAPIWrapper\nfrom typing import Optional\n\n\nclass SearchApi(CustomComponent):\n display_name: str = \"SearchApi\"\n description: str = \"Real-time search engines API.\"\n output_types: list[str] = [\"Document\"]\n documentation: str = \"https://www.searchapi.io/docs/google\"\n field_config = {\n \"engine\": {\n \"display_name\": \"Engine\",\n \"field_type\": \"str\",\n \"info\": \"The search engine to use.\",\n },\n \"params\": {\n \"display_name\": \"Parameters\",\n \"info\": \"The parameters to send with the request.\",\n },\n \"code\": {\"show\": False},\n \"api_key\": {\n \"display_name\": \"API Key\",\n \"field_type\": \"str\",\n \"required\": True, \n \"password\": True,\n \"info\": \"The API key to use SearchApi.\",\n },\n }\n\n def build(\n self,\n engine: str,\n api_key: str,\n params: Optional[dict] = None,\n ) -> Document:\n if params is None:\n params = {}\n\n search_api_wrapper = SearchApiAPIWrapper(engine=engine, searchapi_api_key=api_key)\n\n query = params.pop(\"query\", \"default query\") \n results = search_api_wrapper.results(query, **params)\n\n result = orjson_dumps(results, indent_2=False)\n \n document = Document(page_content=result) \n\n return document","fileTypes":[],"file_path":"","password":false,"name":"code","advanced":false,"dynamic":true,"info":""},"engine":{"type":"str","required":true,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"engine","display_name":"Engine","advanced":false,"dynamic":false,"info":"The search engine to 
use.","value":"youtube_transcripts"},"params":{"type":"dict","required":false,"placeholder":"","list":false,"show":true,"multiline":false,"fileTypes":[],"file_path":"","password":false,"name":"params","display_name":"Parameters","advanced":false,"dynamic":false,"info":"The parameters to send with the request.","value":[{"video_id":"krsBRQbOPQ4"}]},"_type":"CustomComponent"},"description":"Real-time search engines API.","base_classes":["Document"],"display_name":"SearchApi","documentation":"https://www.searchapi.io/docs/google","custom_fields":{"api_key":null,"engine":null,"params":null},"output_types":["SearchApi"],"field_formatters":{},"beta":true},"id":"SearchApi-kZmEj"},"selected":false,"width":384,"height":539,"dragging":false,"positionAbsolute":{"x":-3074.364183247263,"y":-1782.5742787937606}}],"edges":[{"source":"CharacterTextSplitter-WVMFU","sourceHandle":"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCharacterTextSplitterœ,œidœ:œCharacterTextSplitter-WVMFUœ}","target":"Chroma-OtYDg","targetHandle":"{œfieldNameœ:œdocumentsœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œDocumentœ}","data":{"targetHandle":{"fieldName":"documents","id":"Chroma-OtYDg","inputTypes":null,"type":"Document"},"sourceHandle":{"baseClasses":["Document"],"dataType":"CharacterTextSplitter","id":"CharacterTextSplitter-WVMFU"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 
stroke-connection","animated":false,"id":"reactflow__edge-CharacterTextSplitter-WVMFU{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCharacterTextSplitterœ,œidœ:œCharacterTextSplitter-WVMFUœ}-Chroma-OtYDg{œfieldNameœ:œdocumentsœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œDocumentœ}"},{"source":"OpenAIEmbeddings-9yqtI","sourceHandle":"{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9yqtIœ}","target":"Chroma-OtYDg","targetHandle":"{œfieldNameœ:œembeddingœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}","data":{"targetHandle":{"fieldName":"embedding","id":"Chroma-OtYDg","inputTypes":null,"type":"Embeddings"},"sourceHandle":{"baseClasses":["Embeddings","OpenAIEmbeddings"],"dataType":"OpenAIEmbeddings","id":"OpenAIEmbeddings-9yqtI"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-OpenAIEmbeddings-9yqtI{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9yqtIœ}-Chroma-OtYDg{œfieldNameœ:œembeddingœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}"},{"source":"Chroma-OtYDg","sourceHandle":"{œbaseClassesœ:[œVectorStoreœ,œBaseRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-OtYDgœ}","target":"RetrievalQA-DpylI","targetHandle":"{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}","data":{"targetHandle":{"fieldName":"retriever","id":"RetrievalQA-DpylI","inputTypes":null,"type":"BaseRetriever"},"sourceHandle":{"baseClasses":["VectorStore","BaseRetriever"],"dataType":"Chroma","id":"Chroma-OtYDg"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 
stroke-connection","animated":false,"id":"reactflow__edge-Chroma-OtYDg{œbaseClassesœ:[œVectorStoreœ,œBaseRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-OtYDgœ}-RetrievalQA-DpylI{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}"},{"source":"ChatOpenAI-4Mfuz","sourceHandle":"{œbaseClassesœ:[œBaseLanguageModelœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-4Mfuzœ}","target":"CombineDocsChain-afKOq","targetHandle":"{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-afKOqœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}","data":{"targetHandle":{"fieldName":"llm","id":"CombineDocsChain-afKOq","inputTypes":null,"type":"BaseLanguageModel"},"sourceHandle":{"baseClasses":["BaseLanguageModel","BaseChatModel","ChatOpenAI","BaseLLM"],"dataType":"ChatOpenAI","id":"ChatOpenAI-4Mfuz"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-ChatOpenAI-4Mfuz{œbaseClassesœ:[œBaseLanguageModelœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-4Mfuzœ}-CombineDocsChain-afKOq{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-afKOqœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}"},{"source":"CombineDocsChain-afKOq","sourceHandle":"{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œCallableœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-afKOqœ}","target":"RetrievalQA-DpylI","targetHandle":"{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}","data":{"targetHandle":{"fieldName":"combine_documents_chain","id":"RetrievalQA-DpylI","inputTypes":null,"type":"BaseCombineDocumentsChain"},"sourceHandle":{"baseClasses":["BaseCombineDocumentsChain","Callable"],"dataType":"CombineDocsChain","id":"CombineDocsChain-afKOq"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 
stroke-connection","animated":false,"id":"reactflow__edge-CombineDocsChain-afKOq{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œCallableœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-afKOqœ}-RetrievalQA-DpylI{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}"},{"source":"SearchApi-kZmEj","sourceHandle":"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œSearchApiœ,œidœ:œSearchApi-kZmEjœ}","target":"CharacterTextSplitter-WVMFU","targetHandle":"{œfieldNameœ:œdocumentsœ,œidœ:œCharacterTextSplitter-WVMFUœ,œinputTypesœ:null,œtypeœ:œDocumentœ}","data":{"targetHandle":{"fieldName":"documents","id":"CharacterTextSplitter-WVMFU","inputTypes":null,"type":"Document"},"sourceHandle":{"baseClasses":["Document"],"dataType":"SearchApi","id":"SearchApi-kZmEj"}},"style":{"stroke":"#555"},"className":"stroke-gray-900 stroke-connection","animated":false,"id":"reactflow__edge-SearchApi-kZmEj{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œSearchApiœ,œidœ:œSearchApi-kZmEjœ}-CharacterTextSplitter-WVMFU{œfieldNameœ:œdocumentsœ,œidœ:œCharacterTextSplitter-WVMFUœ,œinputTypesœ:null,œtypeœ:œDocumentœ}"}],"viewport":{"x":2046.3642433866817,"y":1270.2852546488134,"zoom":0.6177254407441625}},"description":"Real-time Google Search engine q&a.","name":"SearchApi Tool","last_tested_version":"0.6.5a9","is_component":false} \ No newline at end of file +{ + "id": "7fc56f82-3493-4742-9c85-82b144127aff", + "data": { + "nodes": [ + { + "id": "ChatOpenAI-4Mfuz", + "type": "genericNode", + "position": { + "x": -2243.8068684913856, + "y": -2026.350019258601 + }, + "data": { + "type": "ChatOpenAI", + "node": { + "template": { + "callbacks": { + "type": "langchain_core.callbacks.base.BaseCallbackHandler", + "required": false, + "placeholder": "", + "list": true, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "" + }, + "async_client": { + "type": "Any", 
+ "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "async_client", + "advanced": false, + "dynamic": false, + "info": "" + }, + "cache": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "cache", + "advanced": false, + "dynamic": false, + "info": "" + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "client", + "advanced": false, + "dynamic": false, + "info": "" + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "default_headers", + "advanced": false, + "dynamic": false, + "info": "" + }, + "default_query": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "default_query", + "advanced": false, + "dynamic": false, + "info": "" + }, + "http_client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "http_client", + "advanced": false, + "dynamic": false, + "info": "" + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "value": 2, + "fileTypes": [], + "password": false, + "name": "max_retries", + "advanced": false, + "dynamic": false, + "info": "" + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": true, + "name": "max_tokens", + "advanced": false, + 
"dynamic": false, + "info": "", + "value": "" + }, + "metadata": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "" + }, + "model_kwargs": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "model_kwargs", + "advanced": true, + "dynamic": false, + "info": "" + }, + "model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo-16k", + "fileTypes": [], + "password": false, + "options": [ + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-4", + "gpt-4-32k", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k" + ], + "name": "model_name", + "advanced": false, + "dynamic": false, + "info": "" + }, + "n": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "value": 1, + "fileTypes": [], + "password": false, + "name": "n", + "advanced": false, + "dynamic": false, + "info": "" + }, + "name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "name", + "advanced": false, + "dynamic": false, + "info": "" + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n" + }, + "openai_api_key": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "" + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": false, + "dynamic": false, + "info": "" + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": false, + "dynamic": false, + "info": "" + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "request_timeout", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "min": -1, + "max": 1, + "step": 0.1 + } + }, + "streaming": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "value": false, + "fileTypes": [], + "password": false, + "name": "streaming", + "advanced": false, + "dynamic": false, + "info": "" + }, + "tags": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "" + }, + "temperature": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": 
true, + "multiline": false, + "value": "0.5", + "fileTypes": [], + "password": false, + "name": "temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "min": -1, + "max": 1, + "step": 0.1 + } + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "tiktoken_model_name", + "advanced": false, + "dynamic": false, + "info": "" + }, + "verbose": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "value": false, + "fileTypes": [], + "password": false, + "name": "verbose", + "advanced": false, + "dynamic": false, + "info": "" + }, + "_type": "ChatOpenAI" + }, + "description": "[*Deprecated*] `OpenAI` Chat large language models API.", + "base_classes": [ + "BaseLanguageModel", + "BaseChatModel", + "ChatOpenAI", + "BaseLLM" + ], + "display_name": "ChatOpenAI", + "documentation": "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai", + "custom_fields": {}, + "output_types": [], + "field_formatters": {}, + "beta": false + }, + "id": "ChatOpenAI-4Mfuz" + }, + "selected": false, + "width": 384, + "height": 649, + "positionAbsolute": { + "x": -2243.8068684913856, + "y": -2026.350019258601 + }, + "dragging": false + }, + { + "id": "CharacterTextSplitter-WVMFU", + "type": "genericNode", + "position": { + "x": -2661.4749778477553, + "y": -1608.9437055023366 + }, + "data": { + "type": "CharacterTextSplitter", + "node": { + "template": { + "documents": { + "type": "Document", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "documents", + "advanced": false, + "dynamic": false, + "info": "" + }, + "chunk_overlap": { + "type": "int", + "required": true, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": false, + "value": 200, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "" + }, + "chunk_size": { + "type": "int", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "2000", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "" + }, + "separator": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "\"", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "separator", + "display_name": "Separator", + "advanced": false, + "dynamic": false, + "info": "" + }, + "_type": "CharacterTextSplitter" + }, + "description": "Splitting text that looks at characters.", + "base_classes": [ + "Document" + ], + "display_name": "CharacterTextSplitter", + "documentation": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter", + "custom_fields": {}, + "output_types": [ + "Document" + ], + "field_formatters": {}, + "beta": false + }, + "id": "CharacterTextSplitter-WVMFU" + }, + "selected": false, + "width": 384, + "height": 501, + "positionAbsolute": { + "x": -2661.4749778477553, + "y": -1608.9437055023366 + }, + "dragging": false + }, + { + "id": "Chroma-OtYDg", + "type": "genericNode", + "position": { + "x": -2194.2051907050227, + "y": -1370.1637632208287 + }, + "data": { + "type": "Chroma", + "node": { + "template": { + "documents": { + "type": "Document", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "documents", + "display_name": "Documents", + "advanced": false, + "dynamic": false, 
+ "info": "" + }, + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "" + }, + "chroma_server_cors_allow_origins": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_cors_allow_origins", + "display_name": "Server CORS Allow Origins", + "advanced": true, + "dynamic": false, + "info": "" + }, + "chroma_server_grpc_port": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_grpc_port", + "display_name": "Server gRPC Port", + "advanced": true, + "dynamic": false, + "info": "" + }, + "chroma_server_host": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_host", + "display_name": "Server Host", + "advanced": true, + "dynamic": false, + "info": "" + }, + "chroma_server_port": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_port", + "display_name": "Server Port", + "advanced": true, + "dynamic": false, + "info": "" + }, + "chroma_server_ssl_enabled": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chroma_server_ssl_enabled", + "display_name": "Server SSL Enabled", + "advanced": true, + 
"dynamic": false, + "info": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional, Union\n\nimport chromadb # type: ignore\nfrom langchain.embeddings.base import Embeddings\nfrom langchain.schema import BaseRetriever, Document\nfrom langchain.vectorstores import Chroma\nfrom langchain.vectorstores.base import VectorStore\n\nfrom langflow.custom import CustomComponent\n\n\nclass ChromaComponent(CustomComponent):\n \"\"\"\n A custom component for implementing a Vector Store using Chroma.\n \"\"\"\n\n display_name: str = \"Chroma\"\n description: str = \"Implementation of Vector Store using Chroma\"\n documentation = \"https://python.langchain.com/docs/integrations/vectorstores/chroma\"\n beta: bool = True\n\n def build_config(self):\n \"\"\"\n Builds the configuration for the component.\n\n Returns:\n - dict: A dictionary containing the configuration options for the component.\n \"\"\"\n return {\n \"collection_name\": {\"display_name\": \"Collection Name\", \"value\": \"langflow\"},\n \"persist\": {\"display_name\": \"Persist\"},\n \"persist_directory\": {\"display_name\": \"Persist Directory\"},\n \"code\": {\"show\": False, \"display_name\": \"Code\"},\n \"documents\": {\"display_name\": \"Documents\", \"is_list\": True},\n \"embedding\": {\"display_name\": \"Embedding\"},\n \"chroma_server_cors_allow_origins\": {\n \"display_name\": \"Server CORS Allow Origins\",\n \"advanced\": True,\n },\n \"chroma_server_host\": {\"display_name\": \"Server Host\", \"advanced\": True},\n \"chroma_server_port\": {\"display_name\": \"Server Port\", \"advanced\": True},\n \"chroma_server_grpc_port\": {\n \"display_name\": \"Server gRPC Port\",\n \"advanced\": True,\n },\n \"chroma_server_ssl_enabled\": {\n \"display_name\": \"Server SSL Enabled\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n collection_name: str,\n persist: bool,\n embedding: 
Embeddings,\n chroma_server_ssl_enabled: bool,\n persist_directory: Optional[str] = None,\n documents: Optional[List[Document]] = None,\n chroma_server_cors_allow_origins: Optional[str] = None,\n chroma_server_host: Optional[str] = None,\n chroma_server_port: Optional[int] = None,\n chroma_server_grpc_port: Optional[int] = None,\n ) -> Union[VectorStore, BaseRetriever]:\n \"\"\"\n Builds the Vector Store or BaseRetriever object.\n\n Args:\n - collection_name (str): The name of the collection.\n - persist_directory (Optional[str]): The directory to persist the Vector Store to.\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\n - persist (bool): Whether to persist the Vector Store or not.\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\n - documents (Optional[Document]): The documents to use for the Vector Store.\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\n - chroma_server_host (Optional[str]): The host for the Chroma server.\n - chroma_server_port (Optional[int]): The port for the Chroma server.\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\n\n Returns:\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\n \"\"\"\n\n # Chroma settings\n chroma_settings = None\n\n if chroma_server_host is not None:\n chroma_settings = chromadb.config.Settings(\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,\n chroma_server_host=chroma_server_host,\n chroma_server_port=chroma_server_port or None,\n chroma_server_grpc_port=chroma_server_grpc_port or None,\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\n )\n\n # If documents, then we need to create a Chroma instance using .from_documents\n if documents is not None and embedding is not None:\n if len(documents) == 0:\n raise ValueError(\"If documents are provided, there must be at least one document.\")\n return 
Chroma.from_documents(\n documents=documents, # type: ignore\n persist_directory=persist_directory if persist else None,\n collection_name=collection_name,\n embedding=embedding,\n client_settings=chroma_settings,\n )\n\n return Chroma(persist_directory=persist_directory, client_settings=chroma_settings)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "" + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "video", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "" + }, + "persist": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "persist", + "display_name": "Persist", + "advanced": false, + "dynamic": false, + "info": "" + }, + "persist_directory": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "persist_directory", + "display_name": "Persist Directory", + "advanced": false, + "dynamic": false, + "info": "" + }, + "_type": "CustomComponent" + }, + "description": "Implementation of Vector Store using Chroma", + "base_classes": [ + "VectorStore", + "BaseRetriever" + ], + "display_name": "Chroma", + "documentation": "https://python.langchain.com/docs/integrations/vectorstores/chroma", + "custom_fields": { + "chroma_server_cors_allow_origins": null, + "chroma_server_grpc_port": null, + "chroma_server_host": null, + "chroma_server_port": null, + "chroma_server_ssl_enabled": null, + "collection_name": null, + "documents": null, + "embedding": null, + "persist": null, + 
"persist_directory": null + }, + "output_types": [ + "Chroma" + ], + "field_formatters": {}, + "beta": true + }, + "id": "Chroma-OtYDg" + }, + "selected": false, + "width": 384, + "height": 625, + "positionAbsolute": { + "x": -2194.2051907050227, + "y": -1370.1637632208287 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-9yqtI", + "type": "genericNode", + "position": { + "x": -2653.011009324626, + "y": -1103.8414515074774 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "password": false, + "name": "allowed_special", + "advanced": true, + "dynamic": false, + "info": "" + }, + "async_client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "async_client", + "advanced": true, + "dynamic": false, + "info": "" + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "password": false, + "name": "chunk_size", + "advanced": true, + "dynamic": false, + "info": "" + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "client", + "advanced": true, + "dynamic": false, + "info": "" + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "default_headers", + "advanced": true, + "dynamic": false, + "info": "" + }, + "default_query": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": 
false, + "name": "default_query", + "advanced": true, + "dynamic": false, + "info": "" + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "password": false, + "name": "deployment", + "advanced": true, + "dynamic": false, + "info": "" + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "all", + "fileTypes": [], + "password": false, + "name": "disallowed_special", + "advanced": true, + "dynamic": false, + "info": "" + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "password": false, + "name": "embedding_ctx_length", + "advanced": true, + "dynamic": false, + "info": "" + }, + "headers": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": true, + "value": "{\"Authorization\": \"Bearer \"}", + "fileTypes": [], + "password": false, + "name": "headers", + "advanced": true, + "dynamic": false, + "info": "" + }, + "http_client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "http_client", + "advanced": true, + "dynamic": false, + "info": "" + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 2, + "fileTypes": [], + "password": false, + "name": "max_retries", + "advanced": true, + "dynamic": false, + "info": "" + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "password": false, + "name": 
"model", + "advanced": true, + "dynamic": false, + "info": "" + }, + "model_kwargs": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "model_kwargs", + "advanced": true, + "dynamic": false, + "info": "" + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "value": "" + }, + "openai_api_key": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "value": "" + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": true, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "value": "" + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "" + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "" + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "request_timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "min": -1, + "max": 1, + "step": 0.1 + } + }, + "retry_max_seconds": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 20, + "fileTypes": [], + "password": false, + "name": "retry_max_seconds", + "advanced": true, + "dynamic": false, + "info": "" + }, + "retry_min_seconds": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 4, + "fileTypes": [], + "password": false, + "name": "retry_min_seconds", + "advanced": true, + "dynamic": false, + "info": "" + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "password": false, + "name": "show_progress_bar", + "advanced": true, + "dynamic": false, + "info": "" + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "password": false, + "name": "skip_empty", + "advanced": true, + "dynamic": false, + "info": "" + }, + "tiktoken_enabled": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "password": true, + "name": "tiktoken_enabled", + "advanced": false, + "dynamic": false, + "info": "" + }, + "tiktoken_model_name": { + "type": "str", + "required": 
false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": true, + "name": "tiktoken_model_name", + "advanced": false, + "dynamic": false, + "info": "", + "value": "" + }, + "_type": "OpenAIEmbeddings" + }, + "description": "[*Deprecated*] OpenAI embedding models.", + "base_classes": [ + "Embeddings", + "OpenAIEmbeddings" + ], + "display_name": "OpenAIEmbeddings", + "documentation": "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai", + "custom_fields": {}, + "output_types": [], + "field_formatters": {}, + "beta": false + }, + "id": "OpenAIEmbeddings-9yqtI" + }, + "selected": false, + "width": 384, + "height": 443, + "positionAbsolute": { + "x": -2653.011009324626, + "y": -1103.8414515074774 + }, + "dragging": false + }, + { + "id": "RetrievalQA-DpylI", + "type": "genericNode", + "position": { + "x": -1757.239471200792, + "y": -1258.2132589352987 + }, + "data": { + "type": "RetrievalQA", + "node": { + "template": { + "callbacks": { + "type": "langchain_core.callbacks.base.BaseCallbackHandler", + "required": false, + "placeholder": "", + "list": true, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "" + }, + "combine_documents_chain": { + "type": "BaseCombineDocumentsChain", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "combine_documents_chain", + "advanced": false, + "dynamic": false, + "info": "" + }, + "memory": { + "type": "BaseMemory", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "memory", + "advanced": false, + "dynamic": false, + "info": "" + }, + "retriever": { + "type": "BaseRetriever", + "required": true, + "placeholder": "", + "list": false, + "show": 
true, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "retriever", + "advanced": false, + "dynamic": false, + "info": "" + }, + "input_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "query", + "fileTypes": [], + "password": false, + "name": "input_key", + "advanced": true, + "dynamic": false, + "info": "" + }, + "metadata": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "" + }, + "name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "name", + "advanced": false, + "dynamic": false, + "info": "" + }, + "output_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "result", + "fileTypes": [], + "password": false, + "name": "output_key", + "advanced": true, + "dynamic": false, + "info": "" + }, + "return_source_documents": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "password": false, + "name": "return_source_documents", + "advanced": true, + "dynamic": false, + "info": "" + }, + "tags": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "" + }, + "verbose": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": false, + "multiline": false, + "fileTypes": [], + "password": false, + "name": "verbose", + "advanced": true, + "dynamic": false, + "info": "" + }, + "_type": 
"RetrievalQA" + }, + "description": "Chain for question-answering against an index.", + "base_classes": [ + "Chain", + "RetrievalQA", + "BaseRetrievalQA", + "Callable" + ], + "display_name": "RetrievalQA", + "documentation": "https://python.langchain.com/docs/modules/chains/popular/vector_db_qa", + "custom_fields": {}, + "output_types": [], + "field_formatters": {}, + "beta": false + }, + "id": "RetrievalQA-DpylI" + }, + "selected": false, + "width": 384, + "height": 339, + "positionAbsolute": { + "x": -1757.239471200792, + "y": -1258.2132589352987 + }, + "dragging": true + }, + { + "id": "CombineDocsChain-afKOq", + "type": "genericNode", + "position": { + "x": -1775.9040057312934, + "y": -1644.0423777157919 + }, + "data": { + "type": "CombineDocsChain", + "node": { + "template": { + "llm": { + "type": "BaseLanguageModel", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "llm", + "display_name": "LLM", + "advanced": false, + "dynamic": false, + "info": "" + }, + "chain_type": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "refine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "stuff", + "map_reduce", + "map_rerank", + "refine" + ], + "name": "chain_type", + "advanced": false, + "dynamic": false, + "info": "" + }, + "_type": "load_qa_chain" + }, + "description": "Load question answering chain.", + "base_classes": [ + "BaseCombineDocumentsChain", + "Callable" + ], + "display_name": "CombineDocsChain", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "field_formatters": {}, + "beta": false + }, + "id": "CombineDocsChain-afKOq" + }, + "selected": false, + "width": 384, + "height": 333, + "dragging": false, + "positionAbsolute": { + "x": -1775.9040057312934, + "y": -1644.0423777157919 + } + }, + { + "id": 
"SearchApi-kZmEj", + "type": "genericNode", + "position": { + "x": -3074.364183247263, + "y": -1782.5742787937606 + }, + "data": { + "type": "SearchApi", + "node": { + "template": { + "api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "api_key", + "display_name": "API Key", + "advanced": false, + "dynamic": false, + "info": "The API key to use SearchApi.", + "value": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langflow.custom import CustomComponent\nfrom langchain.schema import Document\nfrom langflow.services.database.models.base import orjson_dumps\nfrom langchain_community.utilities.searchapi import SearchApiAPIWrapper\nfrom typing import Optional\n\n\nclass SearchApi(CustomComponent):\n display_name: str = \"SearchApi\"\n description: str = \"Real-time search engines API.\"\n output_types: list[str] = [\"Document\"]\n documentation: str = \"https://www.searchapi.io/docs/google\"\n field_config = {\n \"engine\": {\n \"display_name\": \"Engine\",\n \"field_type\": \"str\",\n \"info\": \"The search engine to use.\",\n },\n \"params\": {\n \"display_name\": \"Parameters\",\n \"info\": \"The parameters to send with the request.\",\n },\n \"code\": {\"show\": False},\n \"api_key\": {\n \"display_name\": \"API Key\",\n \"field_type\": \"str\",\n \"required\": True, \n \"password\": True,\n \"info\": \"The API key to use SearchApi.\",\n },\n }\n\n def build(\n self,\n engine: str,\n api_key: str,\n params: Optional[dict] = None,\n ) -> Document:\n if params is None:\n params = {}\n\n search_api_wrapper = SearchApiAPIWrapper(engine=engine, searchapi_api_key=api_key)\n\n query = params.pop(\"query\", \"default query\") \n results = search_api_wrapper.results(query, **params)\n\n result = orjson_dumps(results, indent_2=False)\n 
\n document = Document(page_content=result) \n\n return document", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "" + }, + "engine": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "engine", + "display_name": "Engine", + "advanced": false, + "dynamic": false, + "info": "The search engine to use.", + "value": "youtube_transcripts" + }, + "params": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "params", + "display_name": "Parameters", + "advanced": false, + "dynamic": false, + "info": "The parameters to send with the request.", + "value": [ + { + "video_id": "krsBRQbOPQ4" + } + ] + }, + "_type": "CustomComponent" + }, + "description": "Real-time search engines API.", + "base_classes": [ + "Document" + ], + "display_name": "SearchApi", + "documentation": "https://www.searchapi.io/docs/google", + "custom_fields": { + "api_key": null, + "engine": null, + "params": null + }, + "output_types": [ + "SearchApi" + ], + "field_formatters": {}, + "beta": true + }, + "id": "SearchApi-kZmEj" + }, + "selected": false, + "width": 384, + "height": 539, + "dragging": false, + "positionAbsolute": { + "x": -3074.364183247263, + "y": -1782.5742787937606 + } + } + ], + "edges": [ + { + "source": "CharacterTextSplitter-WVMFU", + "sourceHandle": "{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCharacterTextSplitterœ,œidœ:œCharacterTextSplitter-WVMFUœ}", + "target": "Chroma-OtYDg", + "targetHandle": "{œfieldNameœ:œdocumentsœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œDocumentœ}", + "data": { + "targetHandle": { + "fieldName": "documents", + "id": "Chroma-OtYDg", + "inputTypes": null, + "type": "Document" + }, + "sourceHandle": { + 
"baseClasses": [ + "Document" + ], + "dataType": "CharacterTextSplitter", + "id": "CharacterTextSplitter-WVMFU" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "animated": false, + "id": "reactflow__edge-CharacterTextSplitter-WVMFU{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCharacterTextSplitterœ,œidœ:œCharacterTextSplitter-WVMFUœ}-Chroma-OtYDg{œfieldNameœ:œdocumentsœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œDocumentœ}" + }, + { + "source": "OpenAIEmbeddings-9yqtI", + "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9yqtIœ}", + "target": "Chroma-OtYDg", + "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "Chroma-OtYDg", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": [ + "Embeddings", + "OpenAIEmbeddings" + ], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-9yqtI" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "animated": false, + "id": "reactflow__edge-OpenAIEmbeddings-9yqtI{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9yqtIœ}-Chroma-OtYDg{œfieldNameœ:œembeddingœ,œidœ:œChroma-OtYDgœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}" + }, + { + "source": "Chroma-OtYDg", + "sourceHandle": "{œbaseClassesœ:[œVectorStoreœ,œBaseRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-OtYDgœ}", + "target": "RetrievalQA-DpylI", + "targetHandle": "{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}", + "data": { + "targetHandle": { + "fieldName": "retriever", + "id": "RetrievalQA-DpylI", + "inputTypes": null, + "type": "BaseRetriever" + }, + "sourceHandle": { + "baseClasses": [ + "VectorStore", + "BaseRetriever" + ], + "dataType": "Chroma", + "id": 
"Chroma-OtYDg" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "animated": false, + "id": "reactflow__edge-Chroma-OtYDg{œbaseClassesœ:[œVectorStoreœ,œBaseRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-OtYDgœ}-RetrievalQA-DpylI{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}" + }, + { + "source": "ChatOpenAI-4Mfuz", + "sourceHandle": "{œbaseClassesœ:[œBaseLanguageModelœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-4Mfuzœ}", + "target": "CombineDocsChain-afKOq", + "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-afKOqœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", + "data": { + "targetHandle": { + "fieldName": "llm", + "id": "CombineDocsChain-afKOq", + "inputTypes": null, + "type": "BaseLanguageModel" + }, + "sourceHandle": { + "baseClasses": [ + "BaseLanguageModel", + "BaseChatModel", + "ChatOpenAI", + "BaseLLM" + ], + "dataType": "ChatOpenAI", + "id": "ChatOpenAI-4Mfuz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "animated": false, + "id": "reactflow__edge-ChatOpenAI-4Mfuz{œbaseClassesœ:[œBaseLanguageModelœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-4Mfuzœ}-CombineDocsChain-afKOq{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-afKOqœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}" + }, + { + "source": "CombineDocsChain-afKOq", + "sourceHandle": "{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œCallableœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-afKOqœ}", + "target": "RetrievalQA-DpylI", + "targetHandle": "{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}", + "data": { + "targetHandle": { + "fieldName": "combine_documents_chain", + "id": "RetrievalQA-DpylI", + "inputTypes": null, + "type": "BaseCombineDocumentsChain" + }, + "sourceHandle": { + "baseClasses": [ + 
"BaseCombineDocumentsChain", + "Callable" + ], + "dataType": "CombineDocsChain", + "id": "CombineDocsChain-afKOq" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "animated": false, + "id": "reactflow__edge-CombineDocsChain-afKOq{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œCallableœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-afKOqœ}-RetrievalQA-DpylI{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-DpylIœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}" + }, + { + "source": "SearchApi-kZmEj", + "sourceHandle": "{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œSearchApiœ,œidœ:œSearchApi-kZmEjœ}", + "target": "CharacterTextSplitter-WVMFU", + "targetHandle": "{œfieldNameœ:œdocumentsœ,œidœ:œCharacterTextSplitter-WVMFUœ,œinputTypesœ:null,œtypeœ:œDocumentœ}", + "data": { + "targetHandle": { + "fieldName": "documents", + "id": "CharacterTextSplitter-WVMFU", + "inputTypes": null, + "type": "Document" + }, + "sourceHandle": { + "baseClasses": [ + "Document" + ], + "dataType": "SearchApi", + "id": "SearchApi-kZmEj" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "animated": false, + "id": "reactflow__edge-SearchApi-kZmEj{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œSearchApiœ,œidœ:œSearchApi-kZmEjœ}-CharacterTextSplitter-WVMFU{œfieldNameœ:œdocumentsœ,œidœ:œCharacterTextSplitter-WVMFUœ,œinputTypesœ:null,œtypeœ:œDocumentœ}" + } + ], + "viewport": { + "x": 2046.3642433866817, + "y": 1270.2852546488134, + "zoom": 0.6177254407441625 + } + }, + "description": "Real-time Google Search engine q&a.", + "name": "SearchApi Tool", + "last_tested_version": "0.6.5a9", + "is_component": false +} \ No newline at end of file diff --git a/lcserve.Dockerfile b/lcserve.Dockerfile deleted file mode 100644 index 883a2c040..000000000 --- a/lcserve.Dockerfile +++ /dev/null @@ -1,16 +0,0 @@ -# This file is used by `lc-serve` to build the image. -# Don't change the name of this file. 
- -FROM jinawolf/serving-gateway:${version} - -RUN apt-get update \ - && apt-get install --no-install-recommends -y build-essential libpq-dev - -COPY . /appdir/ - -RUN pip install poetry==1.4.0 && cd /appdir && pip install . && \ - pip uninstall -y poetry && \ - apt-get remove --auto-remove -y build-essential libpq-dev && \ - apt-get autoremove && apt-get clean && rm -rf /var/lib/apt/lists/* && rm -rf /tmp/* - -ENTRYPOINT [ "jina", "gateway", "--uses", "config.yml" ] \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index f6dd53297..dc6cb7649 100644 --- a/poetry.lock +++ b/poetry.lock @@ -239,6 +239,43 @@ typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} [package.extras] tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] +[[package]] +name = "assemblyai" +version = "0.23.1" +description = "AssemblyAI Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "assemblyai-0.23.1-py3-none-any.whl", hash = "sha256:2887c7983fa911717cbe37a38d38fcdc8188e62687385b8b6f979546c58354f4"}, + {file = "assemblyai-0.23.1.tar.gz", hash = "sha256:4a3d4d8c4f6c956c6243f0873147ba29da4c6cf5edd6a1b52e6bdaa209526998"}, +] + +[package.dependencies] +httpx = ">=0.19.0" +pydantic = ">=1.7.0,<1.10.7 || >1.10.7" +typing-extensions = ">=3.7" +websockets = ">=11.0" + +[package.extras] +extras = ["pyaudio (>=0.2.13)"] + +[[package]] +name = "astrapy" +version = "0.7.7" +description = "AstraPy is a Pythonic SDK for DataStax Astra" +optional = false +python-versions = ">=3.8.0,<4.0.0" +files = [ + {file = "astrapy-0.7.7-py3-none-any.whl", hash = "sha256:e5def4e3c5ceb06dfc996471250dc0c972b729c06336ea4aac006dadfc071a9a"}, + {file = "astrapy-0.7.7.tar.gz", hash = "sha256:4bf81096a0c26cce18a14a34bb5f699649fd7d90b4ec6050f3d7c0274722d769"}, +] + +[package.dependencies] +cassio = ">=0.1.4,<0.2.0" +deprecation = ">=2.1.0,<2.2.0" +httpx = {version = ">=0.25.2,<1", extras = ["http2"]} +toml = ">=0.10.2,<0.11.0" + [[package]] name = 
"asttokens" version = "2.4.1" @@ -383,6 +420,17 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "bidict" +version = "0.23.1" +description = "The bidirectional mapping library for Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5"}, + {file = "bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71"}, +] + [[package]] name = "billiard" version = "4.2.0" @@ -407,17 +455,17 @@ files = [ [[package]] name = "boto3" -version = "1.34.73" +version = "1.34.77" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.73-py3-none-any.whl", hash = "sha256:4d68e7c7c1339e251c661fd6e2a34e31d281177106326712417fed839907fa84"}, - {file = "boto3-1.34.73.tar.gz", hash = "sha256:f45503333286c03fb692a3ce497b6fdb4e88c51c98a3b8ff05071d7f56571448"}, + {file = "boto3-1.34.77-py3-none-any.whl", hash = "sha256:7abd327980258ec2ae980d2ff7fc32ede7448146b14d34c56bf0be074e2a149b"}, + {file = "boto3-1.34.77.tar.gz", hash = "sha256:8ebed4fa5a3b84dd4037f28226985af00e00fb860d739fc8b1ed6381caa4b330"}, ] [package.dependencies] -botocore = ">=1.34.73,<1.35.0" +botocore = ">=1.34.77,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -426,22 +474,19 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.73" +version = "1.34.77" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.73-py3-none-any.whl", hash = "sha256:88d660b711cc5b5b049e15d547cb09526f86e48c15b78dacad78522109502b91"}, - {file = "botocore-1.34.73.tar.gz", hash = "sha256:8df020b6682b9f1e9ee7b0554d5d0c14b7b23e3de070c85bcdf07fb20bfe4e2b"}, + {file = "botocore-1.34.77-py3-none-any.whl", hash = "sha256:6d6a402032ca0b89525212356a865397f8f2839683dd53d41b8cee1aa84b2b4b"}, + {file = "botocore-1.34.77.tar.gz", hash = "sha256:6dab60261cdbfb7d0059488ea39408d5522fad419c004ba5db3484e6df854ea8"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = [ - {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, - {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""}, -] +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} [package.extras] crt = ["awscrt (==0.19.19)"] @@ -574,6 +619,69 @@ files = [ {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] +[[package]] +name = "cassandra-driver" +version = "3.29.1" +description = "DataStax Driver for Apache Cassandra" +optional = false +python-versions = "*" +files = [ + {file = "cassandra-driver-3.29.1.tar.gz", hash = "sha256:38e9c2a2f2a9664bb03f1f852d5fccaeff2163942b5db35dffcf8bf32a51cfe5"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8f175c7616a63ca48cb8bd4acc443e2a3d889964d5157cead761f23cc8db7bd"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7d66398952b9cd21c40edff56e22b6d3bce765edc94b207ddb5896e7bc9aa088"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbc6f575ef109ce5d4abfa2033bf36c394032abd83e32ab671159ce68e7e17b"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:78f241af75696adb3e470209e2fbb498804c99e2b197d24d74774eee6784f283"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-win32.whl", hash = "sha256:54d9e651a742d6ca3d874ef8d06a40fa032d2dba97142da2d36f60c5675e39f8"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-win_amd64.whl", hash = "sha256:630dc5423cd40eba0ee9db31065e2238098ff1a25a6b1bd36360f85738f26e4b"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b841d38c96bb878d31df393954863652d6d3a85f47bcc00fd1d70a5ea73023f"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19cc7375f673e215bd4cbbefae2de9f07830be7dabef55284a2d2ff8d8691efe"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b74b355be3dcafe652fffda8f14f385ccc1a8dae9df28e6080cc660da39b45f"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e6dac7eddd3f4581859f180383574068a3f113907811b4dad755a8ace4c3fbd"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-win32.whl", hash = "sha256:293a79dba417112b56320ed0013d71fd7520f5fc4a5fd2ac8000c762c6dd5b07"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-win_amd64.whl", hash = "sha256:7c2374fdf1099047a6c9c8329c79d71ad11e61d9cca7de92a0f49655da4bdd8a"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4431a0c836f33a33c733c84997fbdb6398be005c4d18a8c8525c469fdc29393c"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23b08381b171a9e42ace483a82457edcddada9e8367e31677b97538cde2dc34"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4beb29a0139e63a10a5b9a3c7b72c30a4e6e20c9f0574f9d22c0d4144fe3d348"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b206423cc454a78f16b411e7cb641dddc26168ac2e18f2c13665f5f3c89868c"}, + {file = 
"cassandra_driver-3.29.1-cp312-cp312-win32.whl", hash = "sha256:ac898cca7303a3a2a3070513eee12ef0f1be1a0796935c5b8aa13dae8c0a7f7e"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-win_amd64.whl", hash = "sha256:4ad0c9fb2229048ad6ff8c6ddbf1fdc78b111f2b061c66237c2257fcc4a31b14"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4282c5deac462e4bb0f6fd0553a33d514dbd5ee99d0812594210080330ddd1a2"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:41ca7eea069754002418d3bdfbd3dfd150ea12cb9db474ab1a01fa4679a05bcb"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6639ccb268c4dc754bc45e03551711780d0e02cb298ab26cde1f42b7bcc74f8"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9d7d3b1be24a7f113b5404186ccccc977520401303a8fe78ba34134cad2482"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-win32.whl", hash = "sha256:81c8fd556c6e1bb93577e69c1f10a3fadf7ddb93958d226ccbb72389396e9a92"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfe70ed0f27af949de2767ea9cef4092584e8748759374a55bf23c30746c7b23"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2c03c1d834ac1a0ae39f9af297a8cd38829003ce910b08b324fb3abe488ce2b"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9a3e1e2b01f3b7a5cf75c97401bce830071d99c42464352087d7475e0161af93"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90c42006665a4e490b0766b70f3d637f36a30accbef2da35d6d4081c0e0bafc3"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c1aca41f45772f9759e8246030907d92bc35fbbdc91525a3cb9b49939b80ad7"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-win32.whl", hash = 
"sha256:ce4a66245d4a0c8b07fdcb6398698c2c42eb71245fb49cff39435bb702ff7be6"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cae69ceb1b1d9383e988a1b790115253eacf7867ceb15ed2adb736e3ce981be"}, +] + +[package.dependencies] +geomet = ">=0.1,<0.3" + +[package.extras] +cle = ["cryptography (>=35.0)"] +graph = ["gremlinpython (==3.4.6)"] + +[[package]] +name = "cassio" +version = "0.1.5" +description = "A framework-agnostic Python library to seamlessly integrate Apache Cassandra(R) with ML/LLM/genAI workloads." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cassio-0.1.5-py3-none-any.whl", hash = "sha256:cf1d11f255c040bc0aede4963ca020840133377aa54f7f15d2f819d6553d52ce"}, + {file = "cassio-0.1.5.tar.gz", hash = "sha256:88c50c34d46a1bfffca1e0b600318a6efef45e6c18a56ddabe208cbede8dcc27"}, +] + +[package.dependencies] +cassandra-driver = ">=3.28.0" +numpy = ">=1.0" +requests = ">=2" + [[package]] name = "celery" version = "5.3.6" @@ -904,6 +1012,58 @@ typer = ">=0.9.0" typing-extensions = ">=4.5.0" uvicorn = {version = ">=0.18.3", extras = ["standard"]} +[[package]] +name = "clevercsv" +version = "0.8.2" +description = "A Python package for handling messy CSV files" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "clevercsv-0.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:67ab7dc8490ed391add1f26db262d09067796be7e76948bde0a9c6f1dddb7508"}, + {file = "clevercsv-0.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bcf2402578f2f1c655ed21370e668f44098d9734129804f0fba1779dab7f2c47"}, + {file = "clevercsv-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a65d9722303e3db439124418ee312fcab6a75897175842690eae94bdf51b72b"}, + {file = "clevercsv-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:755369a540d40295ea2898343c44dc8a4886e7c9e2fd5f5a780d2995a5516e1d"}, + {file = 
"clevercsv-0.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fe155a8e39160692869f3b9b8a8bca9ba215cc350b9c804437edaa90ede4d16"}, + {file = "clevercsv-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:1e38761cd3f1977f8298a1a4cac3981c953aaf2226c0f1cc3f1ccf2172100ba4"}, + {file = "clevercsv-0.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3502c7af7a4b7a50b923a5972a9357ae2a37aa857dd96c7489c201d104e5d0b9"}, + {file = "clevercsv-0.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ed99467ba2d47a2e1e81e990f74c7542d2cd0da120d922c5c992c17ac3ba026"}, + {file = "clevercsv-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1be9c6f2e73117a77d0b0491c07738fd177ba5e2bf996ac9a221417b223162d7"}, + {file = "clevercsv-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b942ee944264e5c4dbf276de579a502d11d3571daec97af5ebe54e6cadf2b77"}, + {file = "clevercsv-0.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a8a81c1f324e2a5589e0de9b6bd965f1dd19b54b0e9e7f97cab5edf888d486"}, + {file = "clevercsv-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:e474cc07167010c4cb6b1a65588309bc855537cae01ab63cdf61a511e69b4722"}, + {file = "clevercsv-0.8.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2bfa4fe39b3b51bcf07d2f8cf033d7ac53bac5292ef7b9a88bae7c9e6689f366"}, + {file = "clevercsv-0.8.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3e2528f89ee483878c3c8030a2d2da4eef2a8a7ac3036adad0767c1025a99df7"}, + {file = "clevercsv-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8832093b49defb2f224a98158265fe0bbee9ed6a70a8105cf8d7c4b949a8e95b"}, + {file = "clevercsv-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b68a7fbddef0e1746d3ec5e4d114e1900eb1a86d71250b0b208622daa5d2c7c"}, + {file = 
"clevercsv-0.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cb807e7bea5a18cca4d51d1abc58e2f975759b7a0bcb78e1677c56ac7827e9a"}, + {file = "clevercsv-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:74133c7225037052247584cf2df99b052fce4659a423c30f0ea84532e0a30924"}, + {file = "clevercsv-0.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cb9241fe5d6a2e3330c52c04fd18b7c279bbdeb7d0ecef8c4267f14336021d78"}, + {file = "clevercsv-0.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c1129f1328c0940b13b9b08a72f04c8b0a85a6a021994999f34cd3abe19ca206"}, + {file = "clevercsv-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f75f46a4d6b75380f2c0b8190fed6fbb7c1400246ce52a71c68f59baf1ec362"}, + {file = "clevercsv-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bf6058a4930fde2ff00ab82e0ec961247b6e2dd503f67a16f51382b8654cbc2"}, + {file = "clevercsv-0.8.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d9f29bb3fb4c0d35416cc0baf9221d1476f3bee4c367c3618f81ac0f45b71af"}, + {file = "clevercsv-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:17455184e44ce60bb4d21eddd477c7f505b735faff82012d5c857dd31a22e0aa"}, + {file = "clevercsv-0.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:dcf560e643c1fb37d3523a11b0dfbce0bda63ac831d8c71fa2973b262bc1603f"}, + {file = "clevercsv-0.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41513c6ff653baded084a96318e4bdc078a9cce4ff5f9b4656d49aa2421e2e74"}, + {file = "clevercsv-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:71b88f1181fba6e5f6a46292d27f068bdc50200b5660b82a770adfcfb5ae076e"}, + {file = "clevercsv-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff57768d060ac6509dd33a65fa1d2a0fbb70cd62d0075d80a96367a2a9150765"}, + {file = 
"clevercsv-0.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f9514a169270fd095827698d1d5d80a8a3785f600441a11fb3a469ad5209eeb"}, + {file = "clevercsv-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:850f9c112377a73b0bac093df45b846513d34509ba3774fe04061ba4a4afca92"}, + {file = "clevercsv-0.8.2.tar.gz", hash = "sha256:fac1b9671bd77e7835f5fe22898df88b1a11cfd924ff71fc2d0f066065dea940"}, +] + +[package.dependencies] +chardet = ">=3.0" +packaging = ">=23.0" +regex = ">=2018.11" + +[package.extras] +dev = ["faust-cchardet (>=2.1.18)", "furo", "green", "m2r2", "pandas (>=1.0.0)", "pytest (>=2.6)", "sphinx", "tabview (>=1.4)", "termcolor", "wilderness (>=0.1.5)"] +docs = ["furo", "m2r2", "sphinx"] +full = ["faust-cchardet (>=2.1.18)", "pandas (>=1.0.0)", "tabview (>=1.4)", "wilderness (>=0.1.5)"] +precommit = ["wilderness (>=0.1.5)"] +tests = ["faust-cchardet (>=2.1.18)", "pandas (>=1.0.0)", "tabview (>=1.4)", "wilderness (>=0.1.5)"] + [[package]] name = "click" version = "8.1.7" @@ -969,22 +1129,23 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] [[package]] name = "cohere" -version = "4.57" -description = "Python SDK for the Cohere API" +version = "5.2.1" +description = "" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "cohere-4.57-py3-none-any.whl", hash = "sha256:479bdea81ae119e53f671f1ae808fcff9df88211780525d7ef2f7b99dfb32e59"}, - {file = "cohere-4.57.tar.gz", hash = "sha256:71ace0204a92d1a2a8d4b949b88b353b4f22fc645486851924284cc5a0eb700d"}, + {file = "cohere-5.2.1-py3-none-any.whl", hash = "sha256:c694f9d2cdafd87443f54ea5238b51a0fb807f119673e00b814c2a2993368e38"}, + {file = "cohere-5.2.1.tar.gz", hash = "sha256:7cd5522bb162c05c67b2db0b7aba2a103622e17ece9e885f5ef2de66bb67a324"}, ] [package.dependencies] -aiohttp = ">=3.0,<4.0" -backoff = ">=2.0,<3.0" -fastavro = ">=1.8,<2.0" -importlib_metadata = ">=6.0,<7.0" -requests = 
">=2.25.0,<3.0.0" -urllib3 = ">=1.26,<3" +fastavro = ">=1.9.4,<2.0.0" +httpx = ">=0.21.2" +pydantic = ">=1.9.2" +requests = ">=2.31.0,<3.0.0" +tokenizers = ">=0.15.2,<0.16.0" +types-requests = ">=2.31.0.20240311,<3.0.0.0" +typing_extensions = ">=4.0.0" [[package]] name = "colorama" @@ -1025,6 +1186,23 @@ humanfriendly = ">=9.1" [package.extras] cron = ["capturer (>=2.4)"] +[[package]] +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + [[package]] name = "comm" version = "0.2.2" @@ -1134,6 +1312,33 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "crewai" +version = "0.22.5" +description = "Cutting-edge framework for orchestrating role-playing, autonomous AI agents. By fostering collaborative intelligence, CrewAI empowers agents to work together seamlessly, tackling complex tasks." 
+optional = false +python-versions = ">=3.10,<=3.13" +files = [ + {file = "crewai-0.22.5-py3-none-any.whl", hash = "sha256:9f254d8b2ebd7fae9a43d0ebab116a3f536c6620ca3bc955cb796fc8bb9beb7d"}, + {file = "crewai-0.22.5.tar.gz", hash = "sha256:03c76a04f46a432a1c3d9e5e0c8e039f983b25019194b0ab2ec594762ac380cf"}, +] + +[package.dependencies] +click = ">=8.1.7,<9.0.0" +instructor = ">=0.5.2,<0.6.0" +langchain = ">=0.1.10,<0.2.0" +langchain-openai = ">=0.0.5,<0.0.6" +openai = ">=1.13.3,<2.0.0" +opentelemetry-api = ">=1.22.0,<2.0.0" +opentelemetry-exporter-otlp-proto-http = ">=1.22.0,<2.0.0" +opentelemetry-sdk = ">=1.22.0,<2.0.0" +pydantic = ">=2.4.2,<3.0.0" +python-dotenv = "1.0.0" +regex = ">=2023.12.25,<2024.0.0" + +[package.extras] +tools = ["crewai-tools (>=0.0.15,<0.0.16)"] + [[package]] name = "cryptography" version = "42.0.5" @@ -1223,6 +1428,49 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" +[[package]] +name = "datasets" +version = "2.14.7" +description = "HuggingFace community-driven open-source library of datasets" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "datasets-2.14.7-py3-none-any.whl", hash = "sha256:1a64041a7da4f4130f736fc371c1f528b8ddd208cebe156400f65719bdbba79d"}, + {file = "datasets-2.14.7.tar.gz", hash = "sha256:394cf9b4ec0694b25945977b16ad5d18d5c15fb0e94141713eb8ead7452caf9e"}, +] + +[package.dependencies] +aiohttp = "*" +dill = ">=0.3.0,<0.3.8" +fsspec = {version = ">=2023.1.0,<=2023.10.0", extras = ["http"]} +huggingface-hub = ">=0.14.0,<1.0.0" +multiprocess = "*" +numpy = ">=1.17" +packaging = "*" +pandas = "*" +pyarrow = ">=8.0.0" +pyarrow-hotfix = "*" +pyyaml = ">=5.1" +requests = ">=2.19.0" +tqdm = ">=4.62.1" +xxhash = "*" + +[package.extras] +apache-beam = ["apache-beam (>=2.26.0,<2.44.0)"] +audio = ["librosa", "soundfile (>=0.12.1)"] +benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"] +dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", 
"black (>=23.1,<24.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "ruff (>=0.0.241)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] +docs = ["s3fs", "tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "transformers"] +jax = ["jax (>=0.2.8,!=0.3.2,<=0.3.25)", "jaxlib (>=0.1.65,<=0.3.25)"] +metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] +quality = ["black (>=23.1,<24.0)", "pyyaml (>=5.3.1)", "ruff (>=0.0.241)"] +s3 = ["s3fs"] +tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] +tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] +tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0,<2.44.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy (<2.0.0)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "tiktoken", "torch", "transformers", "zstandard"] +torch = ["torch"] +vision = ["Pillow (>=6.2.1)"] + [[package]] name = "debugpy" version = "1.8.1" @@ -1327,18 +1575,17 @@ packaging = "*" [[package]] name = "dill" -version = "0.3.8" +version = "0.3.7" description = "serialize all of Python" optional = false -python-versions = 
">=3.8" +python-versions = ">=3.7" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, + {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "dirtyjson" @@ -1425,6 +1672,89 @@ files = [ {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"}, ] +[[package]] +name = "dspy-ai" +version = "2.4.0" +description = "DSPy" +optional = false +python-versions = ">=3.9" +files = [ + {file = "dspy-ai-2.4.0.tar.gz", hash = "sha256:d646ec015270b70bd51916346c01680923ccfa386c9e5474caac6d4366148678"}, + {file = "dspy_ai-2.4.0-py3-none-any.whl", hash = "sha256:edea508dc315702f61ae39568eaf2d8789ba4f8682102e730e016801b9102703"}, +] + +[package.dependencies] +backoff = ">=2.2.1,<2.3.0" +datasets = ">=2.14.6,<2.15.0" +joblib = ">=1.3.2,<1.4.0" +openai = ">=0.28.1,<2.0.0" +optuna = "*" +pandas = "*" +pydantic = "2.5.0" +regex = "*" +requests = "*" +tqdm = "*" +ujson = "*" + +[package.extras] +chromadb = ["chromadb (>=0.4.14,<0.5.0)"] +dev = ["pytest (>=6.2.5)"] +docs = ["autodoc-pydantic", "docutils (<0.17)", "furo (>=2023.3.27)", "m2r2", "myst-nb", "myst-parser", "sphinx (>=4.3.0)", "sphinx-autobuild", "sphinx-automodapi (==0.16.0)", "sphinx-reredirects (>=0.1.2)", "sphinx-rtd-theme"] +faiss-cpu = ["faiss-cpu", "sentence-transformers"] +marqo = ["marqo", "marqo (>=3.1.0,<3.2.0)"] +mongodb = ["pymongo (>=3.12.0,<3.13.0)"] +pinecone = ["pinecone-client (>=2.2.4,<2.3.0)"] +qdrant = ["fastembed", "fastembed (>=0.1.0,<0.2.0)", "qdrant-client", 
"qdrant-client (>=1.6.2,<1.7.0)"] +weaviate = ["weaviate-client (>=3.26.1,<3.27.0)"] + +[[package]] +name = "duckdb" +version = "0.9.2" +description = "DuckDB embedded database" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, + {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, + {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, + {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, + {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, + {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, + {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, + {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, + {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, + {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, + {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, + {file = 
"duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, + {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, + {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, + {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, + {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, + {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, + {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, + {file = "duckdb-0.9.2.tar.gz", hash = 
"sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, +] + [[package]] name = "easygui" version = "0.98.3" @@ -1633,9 +1963,6 @@ files = [ {file = "fake_useragent-1.5.1-py3-none-any.whl", hash = "sha256:57415096557c8a4e23b62a375c21c55af5fd4ba30549227f562d2c4f5b60e3b3"}, ] -[package.dependencies] -importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} - [[package]] name = "fastapi" version = "0.109.2" @@ -1779,7 +2106,6 @@ files = [ [package.dependencies] blinker = ">=1.6.2" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" Werkzeug = ">=3.0.0" @@ -1934,15 +2260,19 @@ files = [ [[package]] name = "fsspec" -version = "2024.3.1" +version = "2023.10.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, - {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, + {file = "fsspec-2023.10.0-py3-none-any.whl", hash = "sha256:346a8f024efeb749d2a5fca7ba8854474b1ff9af7c3faaf636a4548781136529"}, + {file = "fsspec-2023.10.0.tar.gz", hash = "sha256:330c66757591df346ad3091a53bd907e15348c2ba17d63fd54f5c39c4457d2a5"}, ] +[package.dependencies] +aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} +requests = {version = "*", optional = true, markers = "extra == \"http\""} + [package.extras] abfs = ["adlfs"] adl = ["adlfs"] @@ -1958,7 +2288,7 @@ github = ["requests"] gs = ["gcsfs"] gui = ["panel"] hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] libarchive = ["libarchive-c"] oci = ["ocifs"] s3 = ["s3fs"] @@ -1978,6 +2308,21 @@ files = [ {file = "future-1.0.0.tar.gz", hash = 
"sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] +[[package]] +name = "geomet" +version = "0.2.1.post1" +description = "GeoJSON <-> WKT/WKB conversion utilities" +optional = false +python-versions = ">2.6, !=3.3.*, <4" +files = [ + {file = "geomet-0.2.1.post1-py3-none-any.whl", hash = "sha256:a41a1e336b381416d6cbed7f1745c848e91defaa4d4c1bdc1312732e46ffad2b"}, + {file = "geomet-0.2.1.post1.tar.gz", hash = "sha256:91d754f7c298cbfcabd3befdb69c641c27fe75e808b27aa55028605761d17e95"}, +] + +[package.dependencies] +click = "*" +six = "*" + [[package]] name = "gevent" version = "24.2.1" @@ -2216,13 +2561,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.124.0" +version = "2.125.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.124.0.tar.gz", hash = "sha256:f6d3258420f7c76b0f5266b5e402e6f804e30351b018a10083f4a46c3ec33773"}, - {file = "google_api_python_client-2.124.0-py2.py3-none-any.whl", hash = "sha256:07dc674449ed353704b1169fdee792f74438d024261dad71b6ce7bb9c683d51f"}, + {file = "google-api-python-client-2.125.0.tar.gz", hash = "sha256:51a0385cff65ec135106e8be60ee7112557396dde5f44113ae23912baddda143"}, + {file = "google_api_python_client-2.125.0-py2.py3-none-any.whl", hash = "sha256:0a62b60fbd61b61a455f15d925264b3301099b67cafd2d33cf8bf151f1fca4f4"}, ] [package.dependencies] @@ -2270,257 +2615,28 @@ files = [ google-auth = "*" httplib2 = ">=0.19.0" -[[package]] -name = "google-cloud-aiplatform" -version = "1.45.0" -description = "Vertex AI API client library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "google-cloud-aiplatform-1.45.0.tar.gz", hash = "sha256:8fdc5f79fe9211ccbb9191b92db883798dffdd63995c12cc734bc17fcdbb3846"}, - {file = "google_cloud_aiplatform-1.45.0-py2.py3-none-any.whl", hash = 
"sha256:40bf5e2baa9cdb453689c4276eee5e7fe12db2e7723c133f000d35bcca964fb2"}, -] - -[package.dependencies] -docstring-parser = "<1" -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" -google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" -google-cloud-storage = ">=1.32.0,<3.0.0dev" -packaging = ">=14.3" -proto-plus = ">=1.22.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" -pydantic = "<3" -shapely = "<3.0.0dev" - -[package.extras] -autologging = ["mlflow (>=1.27.0,<=2.1.1)"] -cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)"] -endpoint = ["requests (>=2.28.1)"] -full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] -lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] -metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] -pipelines = ["pyyaml (==5.3.1)"] -prediction = 
["docker (>=5.0.3)", "fastapi (>=0.71.0,<0.103.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] -preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] -private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] -ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)"] -ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] -tensorboard = ["tensorflow (>=2.3.0,<2.15.0)"] -testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (>=2.3.0,<2.15.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow 
(>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] -vizier = ["google-vizier (>=0.1.6)"] -xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] - -[[package]] -name = "google-cloud-bigquery" -version = "3.19.0" -description = "Google BigQuery API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-bigquery-3.19.0.tar.gz", hash = "sha256:8e311dae49768e1501fcdc5e916bff4b7e169471e5707919f4a6f78a02b3b5a6"}, - {file = "google_cloud_bigquery-3.19.0-py2.py3-none-any.whl", hash = "sha256:c6b8850247a4b132066e49f6e45f850c22824482838688d744a4398eea1120ed"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<3.0.0dev" -google-cloud-core = ">=1.6.0,<3.0.0dev" -google-resumable-media = ">=0.6.0,<3.0dev" -packaging = ">=20.0.0" -python-dateutil = ">=2.7.2,<3.0dev" -requests = ">=2.21.0,<3.0.0dev" - -[package.extras] -all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] -bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] -bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] 
-geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] -ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] -ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] -opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] -pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] -tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] - -[[package]] -name = "google-cloud-core" -version = "2.4.1" -description = "Google Cloud API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, - {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, -] - -[package.dependencies] -google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=1.25.0,<3.0dev" - -[package.extras] -grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] - -[[package]] -name = "google-cloud-resource-manager" -version = "1.12.3" -description = "Google Cloud Resource Manager API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-resource-manager-1.12.3.tar.gz", hash = "sha256:809851824119834e4f2310b2c4f38621c1d16b2bb14d5b9f132e69c79d355e7f"}, - {file = "google_cloud_resource_manager-1.12.3-py2.py3-none-any.whl", hash = "sha256:92be7d6959927b76d90eafc4028985c37975a46ded5466a018f02e8649e113d4"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || 
>4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - -[[package]] -name = "google-cloud-storage" -version = "2.16.0" -description = "Google Cloud Storage API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, - {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, -] - -[package.dependencies] -google-api-core = ">=2.15.0,<3.0.0dev" -google-auth = ">=2.26.1,<3.0dev" -google-cloud-core = ">=2.3.0,<3.0dev" -google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.6.0" -requests = ">=2.18.0,<3.0.0dev" - -[package.extras] -protobuf = ["protobuf (<5.0.0dev)"] - -[[package]] -name = "google-crc32c" -version = "1.5.0" -description = "A python wrapper of the C library 'Google CRC32C'" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, - {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, - {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, - {file = 
"google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, - {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, - {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, - {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, - {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, - {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, - {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, - {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", 
hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, - {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, - {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, - {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, - {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, - {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, - {file = 
"google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, - {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, - {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, - {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, - {file = 
"google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, - {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, - {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, - {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, 
-] - -[package.extras] -testing = ["pytest"] - [[package]] name = "google-generativeai" -version = "0.3.2" +version = "0.4.1" description = "Google Generative AI High level API client library and tools." optional = false python-versions = ">=3.9" files = [ - {file = "google_generativeai-0.3.2-py3-none-any.whl", hash = "sha256:8761147e6e167141932dc14a7b7af08f2310dd56668a78d206c19bb8bd85bcd7"}, + {file = "google_generativeai-0.4.1-py3-none-any.whl", hash = "sha256:89be3c00c2e688108fccefc50f47f45fc9d37ecd53c1ade9d86b5d982919c24a"}, ] [package.dependencies] google-ai-generativelanguage = "0.4.0" google-api-core = "*" -google-auth = "*" +google-auth = ">=2.15.0" protobuf = "*" +pydantic = "*" tqdm = "*" typing-extensions = "*" [package.extras] dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] -[[package]] -name = "google-resumable-media" -version = "2.7.0" -description = "Utilities for Google Media Downloads and Resumable Uploads" -optional = false -python-versions = ">= 3.7" -files = [ - {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, - {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, -] - -[package.dependencies] -google-crc32c = ">=1.0,<2.0dev" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] -requests = ["requests (>=2.18.0,<3.0.0dev)"] - [[package]] name = "google-search-results" version = "2.4.2" @@ -2546,7 +2662,6 @@ files = [ ] [package.dependencies] -grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" [package.extras] @@ -2638,22 +2753,6 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", 
"psutil"] -[[package]] -name = "grpc-google-iam-v1" -version = "0.13.0" -description = "IAM API client library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, - {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, -] - -[package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" - [[package]] name = "grpcio" version = "1.62.1" @@ -2876,6 +2975,16 @@ files = [ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, ] +[[package]] +name = "html2text" +version = "2024.2.26" +description = "Turn HTML into equivalent Markdown-structured text." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "html2text-2024.2.26.tar.gz", hash = "sha256:05f8e367d15aaabc96415376776cdd11afd5127a77fce6e36afc60c563ca2c32"}, +] + [[package]] name = "httpcore" version = "1.0.5" @@ -3068,15 +3177,113 @@ files = [ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] +[[package]] +name = "ijson" +version = "3.2.3" +description = "Iterative JSON parser with standard Python iterator interfaces" +optional = false +python-versions = "*" +files = [ + {file = "ijson-3.2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a4ae076bf97b0430e4e16c9cb635a6b773904aec45ed8dcbc9b17211b8569ba"}, + {file = "ijson-3.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cfced0a6ec85916eb8c8e22415b7267ae118eaff2a860c42d2cc1261711d0d31"}, + {file = "ijson-3.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b9d1141cfd1e6d6643aa0b4876730d0d28371815ce846d2e4e84a2d4f471cf3"}, + {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e0a27db6454edd6013d40a956d008361aac5bff375a9c04ab11fc8c214250b5"}, + {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0d526ccb335c3c13063c273637d8611f32970603dfb182177b232d01f14c23"}, + {file = "ijson-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:545a30b3659df2a3481593d30d60491d1594bc8005f99600e1bba647bb44cbb5"}, + {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9680e37a10fedb3eab24a4a7e749d8a73f26f1a4c901430e7aa81b5da15f7307"}, + {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2a80c0bb1053055d1599e44dc1396f713e8b3407000e6390add72d49633ff3bb"}, + {file = "ijson-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f05ed49f434ce396ddcf99e9fd98245328e99f991283850c309f5e3182211a79"}, + {file = "ijson-3.2.3-cp310-cp310-win32.whl", hash = 
"sha256:b4eb2304573c9fdf448d3fa4a4fdcb727b93002b5c5c56c14a5ffbbc39f64ae4"}, + {file = "ijson-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:923131f5153c70936e8bd2dd9dcfcff43c67a3d1c789e9c96724747423c173eb"}, + {file = "ijson-3.2.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:904f77dd3d87736ff668884fe5197a184748eb0c3e302ded61706501d0327465"}, + {file = "ijson-3.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0974444c1f416e19de1e9f567a4560890095e71e81623c509feff642114c1e53"}, + {file = "ijson-3.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1a4b8eb69b6d7b4e94170aa991efad75ba156b05f0de2a6cd84f991def12ff9"}, + {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d052417fd7ce2221114f8d3b58f05a83c1a2b6b99cafe0b86ac9ed5e2fc889df"}, + {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b8064a85ec1b0beda7dd028e887f7112670d574db606f68006c72dd0bb0e0e2"}, + {file = "ijson-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaac293853f1342a8d2a45ac1f723c860f700860e7743fb97f7b76356df883a8"}, + {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6c32c18a934c1dc8917455b0ce478fd7a26c50c364bd52c5a4fb0fc6bb516af7"}, + {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:713a919e0220ac44dab12b5fed74f9130f3480e55e90f9d80f58de129ea24f83"}, + {file = "ijson-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a3a6a2fbbe7550ffe52d151cf76065e6b89cfb3e9d0463e49a7e322a25d0426"}, + {file = "ijson-3.2.3-cp311-cp311-win32.whl", hash = "sha256:6a4db2f7fb9acfb855c9ae1aae602e4648dd1f88804a0d5cfb78c3639bcf156c"}, + {file = "ijson-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:ccd6be56335cbb845f3d3021b1766299c056c70c4c9165fb2fbe2d62258bae3f"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:055b71bbc37af5c3c5861afe789e15211d2d3d06ac51ee5a647adf4def19c0ea"}, + {file = 
"ijson-3.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c075a547de32f265a5dd139ab2035900fef6653951628862e5cdce0d101af557"}, + {file = "ijson-3.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:457f8a5fc559478ac6b06b6d37ebacb4811f8c5156e997f0d87d708b0d8ab2ae"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9788f0c915351f41f0e69ec2618b81ebfcf9f13d9d67c6d404c7f5afda3e4afb"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa234ab7a6a33ed51494d9d2197fb96296f9217ecae57f5551a55589091e7853"}, + {file = "ijson-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd0dc5da4f9dc6d12ab6e8e0c57d8b41d3c8f9ceed31a99dae7b2baf9ea769a"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c6beb80df19713e39e68dc5c337b5c76d36ccf69c30b79034634e5e4c14d6904"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a2973ce57afb142d96f35a14e9cfec08308ef178a2c76b8b5e1e98f3960438bf"}, + {file = "ijson-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105c314fd624e81ed20f925271ec506523b8dd236589ab6c0208b8707d652a0e"}, + {file = "ijson-3.2.3-cp312-cp312-win32.whl", hash = "sha256:ac44781de5e901ce8339352bb5594fcb3b94ced315a34dbe840b4cff3450e23b"}, + {file = "ijson-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:0567e8c833825b119e74e10a7c29761dc65fcd155f5d4cb10f9d3b8916ef9912"}, + {file = "ijson-3.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eeb286639649fb6bed37997a5e30eefcacddac79476d24128348ec890b2a0ccb"}, + {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:396338a655fb9af4ac59dd09c189885b51fa0eefc84d35408662031023c110d1"}, + {file = "ijson-3.2.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e0243d166d11a2a47c17c7e885debf3b19ed136be2af1f5d1c34212850236ac"}, + {file = 
"ijson-3.2.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85afdb3f3a5d0011584d4fa8e6dccc5936be51c27e84cd2882fe904ca3bd04c5"}, + {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4fc35d569eff3afa76bfecf533f818ecb9390105be257f3f83c03204661ace70"}, + {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:455d7d3b7a6aacfb8ab1ebcaf697eedf5be66e044eac32508fccdc633d995f0e"}, + {file = "ijson-3.2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c63f3d57dbbac56cead05b12b81e8e1e259f14ce7f233a8cbe7fa0996733b628"}, + {file = "ijson-3.2.3-cp36-cp36m-win32.whl", hash = "sha256:a4d7fe3629de3ecb088bff6dfe25f77be3e8261ed53d5e244717e266f8544305"}, + {file = "ijson-3.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:96190d59f015b5a2af388a98446e411f58ecc6a93934e036daa75f75d02386a0"}, + {file = "ijson-3.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:35194e0b8a2bda12b4096e2e792efa5d4801a0abb950c48ade351d479cd22ba5"}, + {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1053fb5f0b010ee76ca515e6af36b50d26c1728ad46be12f1f147a835341083"}, + {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:211124cff9d9d139dd0dfced356f1472860352c055d2481459038b8205d7d742"}, + {file = "ijson-3.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92dc4d48e9f6a271292d6079e9fcdce33c83d1acf11e6e12696fb05c5889fe74"}, + {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3dcc33ee56f92a77f48776014ddb47af67c33dda361e84371153c4f1ed4434e1"}, + {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98c6799925a5d1988da4cd68879b8eeab52c6e029acc45e03abb7921a4715c4b"}, + {file = "ijson-3.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4252e48c95cd8ceefc2caade310559ab61c37d82dfa045928ed05328eb5b5f65"}, + {file = "ijson-3.2.3-cp37-cp37m-win32.whl", hash = 
"sha256:644f4f03349ff2731fd515afd1c91b9e439e90c9f8c28292251834154edbffca"}, + {file = "ijson-3.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:ba33c764afa9ecef62801ba7ac0319268a7526f50f7601370d9f8f04e77fc02b"}, + {file = "ijson-3.2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4b2ec8c2a3f1742cbd5f36b65e192028e541b5fd8c7fd97c1fc0ca6c427c704a"}, + {file = "ijson-3.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7dc357da4b4ebd8903e77dbcc3ce0555ee29ebe0747c3c7f56adda423df8ec89"}, + {file = "ijson-3.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bcc51c84bb220ac330122468fe526a7777faa6464e3b04c15b476761beea424f"}, + {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8d54b624629f9903005c58d9321a036c72f5c212701bbb93d1a520ecd15e370"}, + {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6ea7c7e3ec44742e867c72fd750c6a1e35b112f88a917615332c4476e718d40"}, + {file = "ijson-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:916acdc5e504f8b66c3e287ada5d4b39a3275fc1f2013c4b05d1ab9933671a6c"}, + {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81815b4184b85ce124bfc4c446d5f5e5e643fc119771c5916f035220ada29974"}, + {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b49fd5fe1cd9c1c8caf6c59f82b08117dd6bea2ec45b641594e25948f48f4169"}, + {file = "ijson-3.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:86b3c91fdcb8ffb30556c9669930f02b7642de58ca2987845b04f0d7fe46d9a8"}, + {file = "ijson-3.2.3-cp38-cp38-win32.whl", hash = "sha256:a729b0c8fb935481afe3cf7e0dadd0da3a69cc7f145dbab8502e2f1e01d85a7c"}, + {file = "ijson-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:d34e049992d8a46922f96483e96b32ac4c9cffd01a5c33a928e70a283710cd58"}, + {file = "ijson-3.2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9c2a12dcdb6fa28f333bf10b3a0f80ec70bc45280d8435be7e19696fab2bc706"}, + {file = 
"ijson-3.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1844c5b57da21466f255a0aeddf89049e730d7f3dfc4d750f0e65c36e6a61a7c"}, + {file = "ijson-3.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ec3e5ff2515f1c40ef6a94983158e172f004cd643b9e4b5302017139b6c96e4"}, + {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46bafb1b9959872a1f946f8dd9c6f1a30a970fc05b7bfae8579da3f1f988e598"}, + {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab4db9fee0138b60e31b3c02fff8a4c28d7b152040553b6a91b60354aebd4b02"}, + {file = "ijson-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4bc87e69d1997c6a55fff5ee2af878720801ff6ab1fb3b7f94adda050651e37"}, + {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9fd906f0c38e9f0bfd5365e1bed98d649f506721f76bb1a9baa5d7374f26f19"}, + {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e84d27d1acb60d9102728d06b9650e5b7e5cb0631bd6e3dfadba8fb6a80d6c2f"}, + {file = "ijson-3.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2cc04fc0a22bb945cd179f614845c8b5106c0b3939ee0d84ce67c7a61ac1a936"}, + {file = "ijson-3.2.3-cp39-cp39-win32.whl", hash = "sha256:e641814793a037175f7ec1b717ebb68f26d89d82cfd66f36e588f32d7e488d5f"}, + {file = "ijson-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:6bd3e7e91d031f1e8cea7ce53f704ab74e61e505e8072467e092172422728b22"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:06f9707da06a19b01013f8c65bf67db523662a9b4a4ff027e946e66c261f17f0"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be8495f7c13fa1f622a2c6b64e79ac63965b89caf664cc4e701c335c652d15f2"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7596b42f38c3dcf9d434dddd50f46aeb28e96f891444c2b4b1266304a19a2c09"}, + {file = 
"ijson-3.2.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbac4e9609a1086bbad075beb2ceec486a3b138604e12d2059a33ce2cba93051"}, + {file = "ijson-3.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:db2d6341f9cb538253e7fe23311d59252f124f47165221d3c06a7ed667ecd595"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fa8b98be298efbb2588f883f9953113d8a0023ab39abe77fe734b71b46b1220a"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:674e585361c702fad050ab4c153fd168dc30f5980ef42b64400bc84d194e662d"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd12e42b9cb9c0166559a3ffa276b4f9fc9d5b4c304e5a13668642d34b48b634"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d31e0d771d82def80cd4663a66de277c3b44ba82cd48f630526b52f74663c639"}, + {file = "ijson-3.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ce4c70c23521179d6da842bb9bc2e36bb9fad1e0187e35423ff0f282890c9ca"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39f551a6fbeed4433c85269c7c8778e2aaea2501d7ebcb65b38f556030642c17"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b14d322fec0de7af16f3ef920bf282f0dd747200b69e0b9628117f381b7775b"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7851a341429b12d4527ca507097c959659baf5106c7074d15c17c387719ffbcd"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db3bf1b42191b5cc9b6441552fdcb3b583594cb6b19e90d1578b7cbcf80d0fae"}, + {file = "ijson-3.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6f662dc44362a53af3084d3765bb01cd7b4734d1f484a6095cad4cb0cbfe5374"}, + {file = "ijson-3.2.3.tar.gz", hash = 
"sha256:10294e9bf89cb713da05bc4790bdff616610432db561964827074898e174f917"}, +] + [[package]] name = "importlib-metadata" -version = "6.11.0" +version = "7.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, - {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, + {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, + {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, ] [package.dependencies] @@ -3098,9 +3305,6 @@ files = [ {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, ] -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] @@ -3116,6 +3320,26 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "instructor" +version = "0.5.2" +description = "structured outputs for llm" +optional = false +python-versions = ">=3.10,<4.0" +files = [ + {file = "instructor-0.5.2-py3-none-any.whl", hash = "sha256:8c7c927f3cbf6cd863eeebceae3f021e27eaca2ceaf9e9f3c8204540a1126160"}, + {file = "instructor-0.5.2.tar.gz", hash = "sha256:d8d679eb4624254db615794aaab59840e506fa696bc0181d998ae4f9ded2706d"}, +] + 
+[package.dependencies] +aiohttp = ">=3.9.1,<4.0.0" +docstring-parser = ">=0.15,<0.16" +openai = ">=1.1.0,<2.0.0" +pydantic = ">=2.0.2,<3.0.0" +rich = ">=13.7.0,<14.0.0" +tenacity = ">=8.2.3,<9.0.0" +typer = ">=0.9.0,<0.10.0" + [[package]] name = "ipykernel" version = "6.29.4" @@ -3151,13 +3375,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio [[package]] name = "ipython" -version = "8.18.1" +version = "8.23.0" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, + {file = "ipython-8.23.0-py3-none-any.whl", hash = "sha256:07232af52a5ba146dc3372c7bf52a0f890a23edf38d77caef8d53f9cdc2584c1"}, + {file = "ipython-8.23.0.tar.gz", hash = "sha256:7468edaf4f6de3e1b912e57f66c241e6fd3c7099f2ec2136e239e142e800274d"}, ] [package.dependencies] @@ -3166,25 +3390,26 @@ decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} +traitlets = ">=5.13.0" +typing-extensions = {version = "*", markers = "python_version < \"3.12\""} [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", 
"qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] kernel = ["ipykernel"] +matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] name = "itsdangerous" @@ -3385,7 +3610,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" @@ -3429,7 +3653,6 @@ files = [ [package.dependencies] amqp = ">=5.1.1,<6.0.0" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} vine = "*" [package.extras] @@ -3477,13 +3700,13 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "langchain" -version = "0.1.13" +version = "0.1.14" description = "Building applications with LLMs through composability" optional = 
false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain-0.1.13-py3-none-any.whl", hash = "sha256:c87657021b777d6b07e55be379a28660a1cd148c31593569869dd6b0b4cab945"}, - {file = "langchain-0.1.13.tar.gz", hash = "sha256:db330aa79c33501cb1ed97ff465f7645813eaa6cfd742c61e19c2d48e4aaba18"}, + {file = "langchain-0.1.14-py3-none-any.whl", hash = "sha256:94f9b5df2421faaf762d4f43b9d65c270c2f701934580d281e4c6226deef7234"}, + {file = "langchain-0.1.14.tar.gz", hash = "sha256:124c6244cf3957616b98f2df07dc2992fc40dff6ed1a62d8ee8a40f1e0260a40"}, ] [package.dependencies] @@ -3491,8 +3714,8 @@ aiohttp = ">=3.8.3,<4.0.0" async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">=0.5.7,<0.7" jsonpatch = ">=1.33,<2.0" -langchain-community = ">=0.0.29,<0.1" -langchain-core = ">=0.1.33,<0.2.0" +langchain-community = ">=0.0.30,<0.1" +langchain-core = ">=0.1.37,<0.2.0" langchain-text-splitters = ">=0.0.1,<0.1" langsmith = ">=0.1.17,<0.2.0" numpy = ">=1,<2" @@ -3506,12 +3729,12 @@ tenacity = ">=8.1.0,<9.0.0" azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] clarifai = ["clarifai (>=9.1.0)"] cli = ["typer (>=0.9.0,<0.10.0)"] -cohere = ["cohere (>=4,<5)"] +cohere = ["cohere (>=4,<6)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "couchbase (>=4.1.9,<5.0.0)", "dashvector 
(>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere 
(>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] -llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +llms = ["clarifai (>=9.1.0)", "cohere 
(>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] text-helpers = ["chardet (>=5.1.0,<6.0.0)"] @@ -3532,21 +3755,52 @@ anthropic = ">=0.17.0,<1" defusedxml = ">=0.7.1,<0.8.0" langchain-core = ">=0.1,<0.2" +[[package]] +name = "langchain-astradb" +version = "0.1.0" +description = "An integration package connecting Astra DB and LangChain" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_astradb-0.1.0-py3-none-any.whl", hash = "sha256:c6686089da343fce8c31e36c9162323e88888300b09d56b72347a19449d7361f"}, + {file = "langchain_astradb-0.1.0.tar.gz", hash = "sha256:c8a3426c9daa2beeec2dc7a718186b0b9c388082e9543e0bc07363712cc3b947"}, +] + +[package.dependencies] +astrapy = ">=0.7.7,<0.8.0" +langchain-core = ">=0.1.31,<0.2.0" +numpy = ">=1,<2" + +[[package]] +name = "langchain-cohere" +version = "0.1.0" +description = "An integration package connecting Cohere and LangChain" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_cohere-0.1.0-py3-none-any.whl", hash = "sha256:f60e9eb41f7d4ead9659bddb3fae7aa18ddc3fdf2b2867be4bd8a565229f488d"}, + {file = "langchain_cohere-0.1.0.tar.gz", hash = "sha256:960551293ea58d170fad37d44657d3ae4587f6b2e8f3f58922c53c59b9e9d85c"}, +] + +[package.dependencies] +cohere = ">=5.1.4,<6.0.0" +langchain-core = ">=0.1.32,<0.2.0" + [[package]] name = "langchain-community" -version = "0.0.29" +version = "0.0.31" description = "Community contributed LangChain integrations." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_community-0.0.29-py3-none-any.whl", hash = "sha256:1652dddf257089b7b5066974b636262b4a5b680339f4539be133b14ae351e67d"}, - {file = "langchain_community-0.0.29.tar.gz", hash = "sha256:d88107fafa9fe2c5733da9630c68d9ee51cd33b1c88a4950e7a2d9a38f7e7aa3"}, + {file = "langchain_community-0.0.31-py3-none-any.whl", hash = "sha256:905c01b978a1cef7fdcddd2d9241dedc9987db6f23ba1b58d974e38b1cdf2775"}, + {file = "langchain_community-0.0.31.tar.gz", hash = "sha256:9a970bc2bb59bb4c204b696d8c62c2534f6ddb31005005cc1b7d7f934e58a5fc"}, ] [package.dependencies] aiohttp = ">=3.8.3,<4.0.0" dataclasses-json = ">=0.5.7,<0.7" -langchain-core = ">=0.1.33,<0.2.0" +langchain-core = ">=0.1.37,<0.2.0" langsmith = ">=0.1.0,<0.2.0" numpy = ">=1,<2" PyYAML = ">=5.3" @@ -3556,17 +3810,17 @@ tenacity = ">=8.1.0,<9.0.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq 
(>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", 
"fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] [[package]] name = "langchain-core" -version = "0.1.36" +version = "0.1.40" description = "Building applications with LLMs through composability" optional = false python-versions = 
"<4.0,>=3.8.1" files = [ - {file = "langchain_core-0.1.36-py3-none-any.whl", hash = "sha256:564beeb18ab13deca8daf6e6e74acab52e0b8f6202110262a4c914e4450febd2"}, - {file = "langchain_core-0.1.36.tar.gz", hash = "sha256:aa2432370ca3d2a5d6dd14a810aa6488bf2f622ff7a0a3dc30f6e0ed9d7f5fa8"}, + {file = "langchain_core-0.1.40-py3-none-any.whl", hash = "sha256:618dbb7ab44d8b263b91e384db1ff07d0db256ae5bdafa0123a115b6a75a13f1"}, + {file = "langchain_core-0.1.40.tar.gz", hash = "sha256:34c06fc0e6d3534b738c63f85403446b4be71161665b7e091f9bb19c914ec100"}, ] [package.dependencies] @@ -3575,7 +3829,6 @@ langsmith = ">=0.1.0,<0.2.0" packaging = ">=23.2,<24.0" pydantic = ">=1,<3" PyYAML = ">=5.3" -requests = ">=2,<3" tenacity = ">=8.1.0,<9.0.0" [package.extras] @@ -3583,35 +3836,35 @@ extended-testing = ["jinja2 (>=3,<4)"] [[package]] name = "langchain-experimental" -version = "0.0.55" +version = "0.0.56" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langchain_experimental-0.0.55-py3-none-any.whl", hash = "sha256:1b6b47689a4d21f81fac5ce3e968914bf7cafb70a14695404a0a67585114e09f"}, - {file = "langchain_experimental-0.0.55.tar.gz", hash = "sha256:8338a5e31b3dce8b072ddbdc1dda7765d864505478923e98681a93ae2555c237"}, + {file = "langchain_experimental-0.0.56-py3-none-any.whl", hash = "sha256:91fd7a723b0ef3193a63726745523efdd5dd7134116d838c312cfdbf4b354298"}, + {file = "langchain_experimental-0.0.56.tar.gz", hash = "sha256:ebb1c34815739d3af50c9b709c57b91d0357d567ad2042acb724853c6ba1d735"}, ] [package.dependencies] -langchain = ">=0.1.13,<0.2.0" -langchain-core = ">=0.1.33,<0.2.0" +langchain = ">=0.1.14,<0.2.0" +langchain-core = ">=0.1.37,<0.2.0" [package.extras] extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2.0.1,<3.0.0)", "presidio-analyzer (>=2.2.352,<3.0.0)", "presidio-anonymizer (>=2.2.352,<3.0.0)", "sentence-transformers (>=2,<3)", "tabulate (>=0.9.0,<0.10.0)", 
"vowpal-wabbit-next (==0.6.0)"] [[package]] name = "langchain-google-genai" -version = "0.0.6" +version = "1.0.1" description = "An integration package connecting Google's genai package and LangChain" optional = false -python-versions = ">=3.9,<4.0" +python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_google_genai-0.0.6-py3-none-any.whl", hash = "sha256:2e347ad4d18f8f761ed8d3997f9ea52b41f97b7fd81f698612ad718da5d9c13b"}, - {file = "langchain_google_genai-0.0.6.tar.gz", hash = "sha256:93156a523306031d4bbfa7c759778a1123779902512f8ef329516da558069d70"}, + {file = "langchain_google_genai-1.0.1-py3-none-any.whl", hash = "sha256:c59e7f49b3afb3d6d25366f83d47319e6825c8f0a9b95f4a591687dcb18862c0"}, + {file = "langchain_google_genai-1.0.1.tar.gz", hash = "sha256:43b80ed2fbae3495381a4ed9d3978067c467fd5e5cf123aeaed79a9998b6e54a"}, ] [package.dependencies] -google-generativeai = ">=0.3.1,<0.4.0" +google-generativeai = ">=0.4.1,<0.5.0" langchain-core = ">=0.1,<0.2" [package.extras] @@ -3619,20 +3872,20 @@ images = ["pillow (>=10.1.0,<11.0.0)"] [[package]] name = "langchain-openai" -version = "0.0.6" +version = "0.0.5" description = "An integration package connecting OpenAI and LangChain" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_openai-0.0.6-py3-none-any.whl", hash = "sha256:2ef040e4447a26a9d3bd45dfac9cefa00797ea58555a3d91ab4f88699eb3a005"}, - {file = "langchain_openai-0.0.6.tar.gz", hash = "sha256:f5c4ebe46f2c8635c8f0c26cc8df27700aacafea025410e418d5a080039974dd"}, + {file = "langchain_openai-0.0.5-py3-none-any.whl", hash = "sha256:93b37dfac274adad65e46d5d6e71411e00c6984bcc5e10f1d6bb58e7944dc01b"}, + {file = "langchain_openai-0.0.5.tar.gz", hash = "sha256:f317fee5b652949ad96ad7edf8ef7a044a6a3f0cc71d1e12f9d5261789fd68c4"}, ] [package.dependencies] langchain-core = ">=0.1.16,<0.2" numpy = ">=1,<2" openai = ">=1.10.0,<2.0.0" -tiktoken = ">=0.5.2,<1" +tiktoken = ">=0.5.2,<0.6.0" [[package]] name = "langchain-text-splitters" @@ -3665,15 
+3918,68 @@ files = [ [package.dependencies] six = "*" +[[package]] +name = "langflow-base" +version = "0.0.17" +description = "A Python package with a built-in web application" +optional = false +python-versions = ">=3.10,<3.12" +files = [] +develop = true + +[package.dependencies] +alembic = "^1.13.0" +bcrypt = "4.0.1" +cachetools = "^5.3.1" +chromadb = "^0.4.24" +docstring-parser = "^0.15" +duckdb = "^0.9.2" +fastapi = "^0.109.0" +gunicorn = "^21.2.0" +httpx = "*" +jq = {version = "^1.7.0", markers = "sys_platform != \"win32\""} +langchain = "~0.1.0" +langchain-anthropic = "^0.1.4" +langchain-astradb = "^0.1.0" +langchain-experimental = "*" +loguru = "^0.7.1" +multiprocess = "^0.70.14" +orjson = "3.9.15" +pandas = "2.2.0" +passlib = "^1.7.4" +pillow = "^10.2.0" +platformdirs = "^4.2.0" +pydantic = "^2.5.0" +pydantic-settings = "^2.1.0" +pypdf = "^4.1.0" +python-docx = "^1.1.0" +python-jose = "^3.3.0" +python-multipart = "^0.0.7" +python-socketio = "^5.11.0" +rich = "^13.7.0" +sqlmodel = "^0.0.14" +typer = "^0.9.0" +uvicorn = "^0.27.0" +websockets = "*" + +[package.extras] +all = [] +deploy = [] +local = [] + +[package.source] +type = "directory" +url = "src/backend/base" + [[package]] name = "langfuse" -version = "2.21.1" +version = "2.21.2" description = "A client library for accessing langfuse" optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langfuse-2.21.1-py3-none-any.whl", hash = "sha256:5ef286823a4c9903e2120ad2bf0169a929d41789702535abc713e66a0d270f05"}, - {file = "langfuse-2.21.1.tar.gz", hash = "sha256:36494ea016784ac339a1a5375b88c33484e81668433956ead442d7a93c217078"}, + {file = "langfuse-2.21.2-py3-none-any.whl", hash = "sha256:bd65858e6326776f65c9b2e414e64fdea0f14402f5c784952af93346dfd489bb"}, + {file = "langfuse-2.21.2.tar.gz", hash = "sha256:eb7911aa640f020f097cb56eaa7d67f01d39f9e2bdd6226e0c5d642a87f3663c"}, ] [package.dependencies] @@ -3686,18 +3992,18 @@ wrapt = ">=1.14,<2.0" [package.extras] langchain = ["langchain 
(>=0.0.309)"] -llama-index = ["llama-index (>=0.10.12,<0.11.0)"] +llama-index = ["llama-index (>=0.10.12,<2.0.0)"] openai = ["openai (>=0.27.8)"] [[package]] name = "langsmith" -version = "0.1.37" +version = "0.1.39" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.37-py3-none-any.whl", hash = "sha256:2ea0375eb76d95b1cd32f57fc27a5c9c529443fbe816c0c0671d7e25e432ea37"}, - {file = "langsmith-0.1.37.tar.gz", hash = "sha256:d410491b6ff6e1f07aeb1d33fb19784f544eed5fb549b514c793ab19d8fb4b60"}, + {file = "langsmith-0.1.39-py3-none-any.whl", hash = "sha256:85c19177162585728001cb7ae91ab48ca4abe39b7bc1ff783212ac426ded222b"}, + {file = "langsmith-0.1.39.tar.gz", hash = "sha256:2aec9d2f9cc664042d2121b13da569b0902aff842c86b17b440245d57da84ec5"}, ] [package.dependencies] @@ -3722,14 +4028,40 @@ interegular = ["interegular (>=0.3.1,<0.4.0)"] nearley = ["js2py"] regex = ["regex"] +[[package]] +name = "litellm" +version = "1.34.22" +description = "Library to easily interface with LLM API providers" +optional = false +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +files = [ + {file = "litellm-1.34.22-py3-none-any.whl", hash = "sha256:0e573d56d762f4060c53493da4a08c48034b5bb5ba22e34517065739adfd9154"}, + {file = "litellm-1.34.22.tar.gz", hash = "sha256:ca50ede3ca8d3f9dc2765ca13cf2ff5c4e4b9afb4db222f9d7cb9ee838b6180f"}, +] + +[package.dependencies] +aiohttp = "*" +click = "*" +importlib-metadata = ">=6.8.0" +jinja2 = ">=3.1.2,<4.0.0" +openai = ">=1.0.0" +python-dotenv = ">=0.2.0" +requests = ">=2.31.0,<3.0.0" +tiktoken = ">=0.4.0" +tokenizers = "*" + +[package.extras] +extra-proxy = ["azure-identity (>=1.15.0,<2.0.0)", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "resend (>=0.8.0,<0.9.0)"] +proxy = ["PyJWT (>=2.8.0,<3.0.0)", "apscheduler 
(>=3.10.4,<4.0.0)", "backoff", "cryptography (>=42.0.5,<43.0.0)", "fastapi (>=0.109.1,<0.110.0)", "fastapi-sso (>=0.10.0,<0.11.0)", "gunicorn (>=21.2.0,<22.0.0)", "orjson (>=3.9.7,<4.0.0)", "python-multipart (>=0.0.9,<0.0.10)", "pyyaml (>=6.0.1,<7.0.0)", "rq", "uvicorn (>=0.22.0,<0.23.0)"] + [[package]] name = "llama-cpp-python" -version = "0.2.57" +version = "0.2.59" description = "Python bindings for the llama.cpp library" optional = true python-versions = ">=3.8" files = [ - {file = "llama_cpp_python-0.2.57.tar.gz", hash = "sha256:bd81dbc4bc03b7deca3be0496330705d4c53bba726f7c3a47d556c7ec2452304"}, + {file = "llama_cpp_python-0.2.59.tar.gz", hash = "sha256:4b19283226ab91c74c6d811d88724a6f32d9dd7d07caf9d8b897dd3372d5d4d2"}, ] [package.dependencies] @@ -3746,13 +4078,143 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)", "scipy (>=1.10)"] [[package]] name = "llama-index" +version = "0.10.26" +description = "Interface between LLMs and your data" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index-0.10.26-py3-none-any.whl", hash = "sha256:25a3f8c9f63fbdaaf3d760ce59c5dad8afc5cf40431e9e6a28e02b92a820d450"}, + {file = "llama_index-0.10.26.tar.gz", hash = "sha256:5a6036bcb2449277ede3244cfa1b49d9fab5dba30ad8b212c2df92449cfa7d48"}, +] + +[package.dependencies] +llama-index-agent-openai = ">=0.1.4,<0.3.0" +llama-index-cli = ">=0.1.2,<0.2.0" +llama-index-core = ">=0.10.26,<0.11.0" +llama-index-embeddings-openai = ">=0.1.5,<0.2.0" +llama-index-indices-managed-llama-cloud = ">=0.1.2,<0.2.0" +llama-index-legacy = ">=0.9.48,<0.10.0" +llama-index-llms-openai = ">=0.1.13,<0.2.0" +llama-index-multi-modal-llms-openai = ">=0.1.3,<0.2.0" +llama-index-program-openai = ">=0.1.3,<0.2.0" +llama-index-question-gen-openai = ">=0.1.2,<0.2.0" +llama-index-readers-file = ">=0.1.4,<0.2.0" +llama-index-readers-llama-parse = ">=0.1.2,<0.2.0" + +[[package]] +name = "llama-index-agent-openai" +version = "0.2.2" +description = "llama-index agent openai integration" 
+optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_agent_openai-0.2.2-py3-none-any.whl", hash = "sha256:fa8cbc2c7be5a465848f8d5b432db01c55f07dfa06357edb7fb77fb17d534d1e"}, + {file = "llama_index_agent_openai-0.2.2.tar.gz", hash = "sha256:12063dd932c74015796f973986cc52d783f51fda38e4ead72a56d0fd195925ee"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.5,<0.2.0" +openai = ">=1.14.0" + +[[package]] +name = "llama-index-cli" +version = "0.1.11" +description = "llama-index cli" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_cli-0.1.11-py3-none-any.whl", hash = "sha256:44bc32af6d9bc0b523ad284f24fa1ec17288491243fe6d7c7b4770b3245dbb84"}, + {file = "llama_index_cli-0.1.11.tar.gz", hash = "sha256:5de707e125aa877d70c61da70cc44fea72a9f7adb77f413b51f73b1deffdd750"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.11.post1,<0.11.0" +llama-index-embeddings-openai = ">=0.1.1,<0.2.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-core" +version = "0.10.26" +description = "Interface between LLMs and your data" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_core-0.10.26-py3-none-any.whl", hash = "sha256:e08886b9d353fb45f63d0011e78326627f1ee5c7761585fdd85a9a0dfdbf0365"}, + {file = "llama_index_core-0.10.26.tar.gz", hash = "sha256:8ea27093a053aed79b7b5d80694152066ddaa5b70116308d48db382c76367a87"}, +] + +[package.dependencies] +aiohttp = ">=3.8.6,<4.0.0" +dataclasses-json = "*" +deprecated = ">=1.2.9.3" +dirtyjson = ">=1.0.8,<2.0.0" +fsspec = ">=2023.5.0" +httpx = "*" +llamaindex-py-client = ">=0.1.15,<0.2.0" +nest-asyncio = ">=1.5.8,<2.0.0" +networkx = ">=3.0" +nltk = ">=3.8.1,<4.0.0" +numpy = "*" +openai = ">=1.1.0" +pandas = "*" +pillow = ">=9.0.0" +PyYAML = ">=6.0.1" +requests = ">=2.31.0" +SQLAlchemy = {version = ">=1.4.49", extras = ["asyncio"]} +tenacity = ">=8.2.0,<9.0.0" 
+tiktoken = ">=0.3.3" +tqdm = ">=4.66.1,<5.0.0" +typing-extensions = ">=4.5.0" +typing-inspect = ">=0.8.0" +wrapt = "*" + +[package.extras] +gradientai = ["gradientai (>=1.4.0)"] +html = ["beautifulsoup4 (>=4.12.2,<5.0.0)"] +langchain = ["langchain (>=0.0.303)"] +local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1.99,<0.2.0)", "transformers[torch] (>=4.33.1,<5.0.0)"] +postgres = ["asyncpg (>=0.29.0,<0.30.0)", "pgvector (>=0.2.4,<0.3.0)", "psycopg2-binary (>=2.9.9,<3.0.0)"] +query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"] + +[[package]] +name = "llama-index-embeddings-openai" +version = "0.1.7" +description = "llama-index embeddings openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_embeddings_openai-0.1.7-py3-none-any.whl", hash = "sha256:6023925ed1487b0688323d21711efbf8880e82ed3b87ef413255c3dc63a2f2fe"}, + {file = "llama_index_embeddings_openai-0.1.7.tar.gz", hash = "sha256:c71cc9820680c4cedfc9845dc87b94f6851d1ccce1e486fc91298f8fa8d9f27d"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" + +[[package]] +name = "llama-index-indices-managed-llama-cloud" +version = "0.1.5" +description = "llama-index indices llama-cloud integration" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_indices_managed_llama_cloud-0.1.5-py3-none-any.whl", hash = "sha256:79f636cb6f4fabb12fec153564110f7f4dfda3cacb087793a5fec988484d7d2c"}, + {file = "llama_index_indices_managed_llama_cloud-0.1.5.tar.gz", hash = "sha256:47cdde9f06bbddd508f0efcf41de425e85171ac2c8fda8a5fb2a89673e1c8c71"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.0,<0.11.0" +llamaindex-py-client = ">=0.1.13,<0.2.0" + +[[package]] +name = "llama-index-legacy" version = "0.9.48" description = "Interface between LLMs and your data" optional 
= false python-versions = ">=3.8.1,<4.0" files = [ - {file = "llama_index-0.9.48-py3-none-any.whl", hash = "sha256:56aa406d39e7ca53a5d990b55d69901fbb9eddc9af6a40950367dc5d734f6283"}, - {file = "llama_index-0.9.48.tar.gz", hash = "sha256:c50d02ac8c7e4ff9fb41f0860391fe0020ad8a3d7c30048db52d17d8be654bf3"}, + {file = "llama_index_legacy-0.9.48-py3-none-any.whl", hash = "sha256:714ada95beac179b4acefa4d2deff74bb7b2f22b0f699ac247d4cb67738d16d4"}, + {file = "llama_index_legacy-0.9.48.tar.gz", hash = "sha256:82ddc4691edbf49533d65582c249ba22c03fe96fbd3e92f7758dccef28e43834"}, ] [package.dependencies] @@ -3783,6 +4245,129 @@ local-models = ["optimum[onnxruntime] (>=1.13.2,<2.0.0)", "sentencepiece (>=0.1. postgres = ["asyncpg (>=0.28.0,<0.29.0)", "pgvector (>=0.1.0,<0.2.0)", "psycopg2-binary (>=2.9.9,<3.0.0)"] query-tools = ["guidance (>=0.0.64,<0.0.65)", "jsonpath-ng (>=1.6.0,<2.0.0)", "lm-format-enforcer (>=0.4.3,<0.5.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "scikit-learn", "spacy (>=3.7.1,<4.0.0)"] +[[package]] +name = "llama-index-llms-openai" +version = "0.1.14" +description = "llama-index llms openai integration" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_llms_openai-0.1.14-py3-none-any.whl", hash = "sha256:13cec467962a6ccb9e63451c7febe8e9c2ed536bd6b1058239c2b4fd86776060"}, + {file = "llama_index_llms_openai-0.1.14.tar.gz", hash = "sha256:7eba66882ae84fa42b188941234b84267c48e449ef214f511756dcad3f9f0b62"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.24,<0.11.0" + +[[package]] +name = "llama-index-multi-modal-llms-openai" +version = "0.1.4" +description = "llama-index multi-modal-llms openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_multi_modal_llms_openai-0.1.4-py3-none-any.whl", hash = "sha256:03b887d110551d5d5b99b9fd110824e6311f2e31f4d5e67dafd2ee66da32818d"}, + {file = "llama_index_multi_modal_llms_openai-0.1.4.tar.gz", hash = 
"sha256:6a5d6584c33a9d1b06cf5c874c63af2603fc93b660bde481a8c547e876c6e2c3"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-program-openai" +version = "0.1.5" +description = "llama-index program openai integration" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_program_openai-0.1.5-py3-none-any.whl", hash = "sha256:20b6efa706ac73e4dc5086900fea1ffcb1eb0787c8a6f081669d37da7235aee0"}, + {file = "llama_index_program_openai-0.1.5.tar.gz", hash = "sha256:c33aa2d2876ad0ff1f9a2a755d4e7d4917240847d0174e7b2d0b8474499bb700"}, +] + +[package.dependencies] +llama-index-agent-openai = ">=0.1.1,<0.3.0" +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-question-gen-openai" +version = "0.1.3" +description = "llama-index question_gen openai integration" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "llama_index_question_gen_openai-0.1.3-py3-none-any.whl", hash = "sha256:1f83b49e8b2e665030d1ec8c54687d6985d9fa8426147b64e46628a9e489b302"}, + {file = "llama_index_question_gen_openai-0.1.3.tar.gz", hash = "sha256:4486198117a45457d2e036ae60b93af58052893cc7d78fa9b6f47dd47b81e2e1"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.1,<0.11.0" +llama-index-llms-openai = ">=0.1.1,<0.2.0" +llama-index-program-openai = ">=0.1.1,<0.2.0" + +[[package]] +name = "llama-index-readers-file" +version = "0.1.13" +description = "llama-index readers file integration" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_readers_file-0.1.13-py3-none-any.whl", hash = "sha256:692988b8c3ca2807d21a171351078b634ada2ff1682ebe4a484f82da384dfc55"}, + {file = "llama_index_readers_file-0.1.13.tar.gz", hash = "sha256:830f06ec7b34437fc3bb5f268d235c5c7640296adb148d8f92277dceb7f0846d"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.12.3,<5.0.0" 
+llama-index-core = ">=0.10.1,<0.11.0" +pymupdf = ">=1.23.21,<2.0.0" +pypdf = ">=4.0.1,<5.0.0" +striprtf = ">=0.0.26,<0.0.27" + +[[package]] +name = "llama-index-readers-llama-parse" +version = "0.1.4" +description = "llama-index readers llama-parse integration" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_index_readers_llama_parse-0.1.4-py3-none-any.whl", hash = "sha256:c4914b37d12cceee56fbd185cca80f87d60acbf8ea7a73f9719610180be1fcdd"}, + {file = "llama_index_readers_llama_parse-0.1.4.tar.gz", hash = "sha256:78608b193c818894aefeee0aa303f02b7f80f2e4caf13866c2fd3b0b1023e2c0"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.7,<0.11.0" +llama-parse = ">=0.4.0,<0.5.0" + +[[package]] +name = "llama-parse" +version = "0.4.0" +description = "Parse files into RAG-Optimized formats." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "llama_parse-0.4.0-py3-none-any.whl", hash = "sha256:52b450c9bb94746c5f86867f54fd2e74670d89b385dcb879eb93490a42fda628"}, + {file = "llama_parse-0.4.0.tar.gz", hash = "sha256:ee521e9422db48dbdf00303cb6e90f1066131960b5ca4e14f5b5a6db6b7917e2"}, +] + +[package.dependencies] +llama-index-core = ">=0.10.7" + +[[package]] +name = "llamaindex-py-client" +version = "0.1.16" +description = "" +optional = false +python-versions = "<4.0,>=3.8" +files = [ + {file = "llamaindex_py_client-0.1.16-py3-none-any.whl", hash = "sha256:b34e0a14984468f46ff5eebfe4b2b88598a24ff9459338a5621eee78e58bf0db"}, + {file = "llamaindex_py_client-0.1.16.tar.gz", hash = "sha256:e99bbc0855e6caaa75eba219cdb3cf6c943ae94fa15ccbb68a3a08d452fd6380"}, +] + +[package.dependencies] +httpx = ">=0.20.0" +pydantic = ">=1.10" + [[package]] name = "locust" version = "2.24.1" @@ -3830,111 +4415,173 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio [[package]] name = "lxml" -version = "4.9.4" +version = "5.2.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt 
with the ElementTree API." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +python-versions = ">=3.6" files = [ - {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e214025e23db238805a600f1f37bf9f9a15413c7bf5f9d6ae194f84980c78722"}, - {file = "lxml-4.9.4-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec53a09aee61d45e7dbe7e91252ff0491b6b5fee3d85b2d45b173d8ab453efc1"}, - {file = "lxml-4.9.4-cp27-cp27m-win32.whl", hash = "sha256:7d1d6c9e74c70ddf524e3c09d9dc0522aba9370708c2cb58680ea40174800013"}, - {file = "lxml-4.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:cb53669442895763e61df5c995f0e8361b61662f26c1b04ee82899c2789c8f69"}, - {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:647bfe88b1997d7ae8d45dabc7c868d8cb0c8412a6e730a7651050b8c7289cf2"}, - {file = "lxml-4.9.4-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4d973729ce04784906a19108054e1fd476bc85279a403ea1a72fdb051c76fa48"}, - {file = "lxml-4.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:056a17eaaf3da87a05523472ae84246f87ac2f29a53306466c22e60282e54ff8"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aaa5c173a26960fe67daa69aa93d6d6a1cd714a6eb13802d4e4bd1d24a530644"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:647459b23594f370c1c01768edaa0ba0959afc39caeeb793b43158bb9bb6a663"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bdd9abccd0927673cffe601d2c6cdad1c9321bf3437a2f507d6b037ef91ea307"}, - {file = "lxml-4.9.4-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:00e91573183ad273e242db5585b52670eddf92bacad095ce25c1e682da14ed91"}, - {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:a602ed9bd2c7d85bd58592c28e101bd9ff9c718fbde06545a70945ffd5d11868"}, - {file = "lxml-4.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:de362ac8bc962408ad8fae28f3967ce1a262b5d63ab8cefb42662566737f1dc7"}, - {file = "lxml-4.9.4-cp310-cp310-win32.whl", hash = "sha256:33714fcf5af4ff7e70a49731a7cc8fd9ce910b9ac194f66eaa18c3cc0a4c02be"}, - {file = "lxml-4.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:d3caa09e613ece43ac292fbed513a4bce170681a447d25ffcbc1b647d45a39c5"}, - {file = "lxml-4.9.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:359a8b09d712df27849e0bcb62c6a3404e780b274b0b7e4c39a88826d1926c28"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:43498ea734ccdfb92e1886dfedaebeb81178a241d39a79d5351ba2b671bff2b2"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4855161013dfb2b762e02b3f4d4a21cc7c6aec13c69e3bffbf5022b3e708dd97"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c71b5b860c5215fdbaa56f715bc218e45a98477f816b46cfde4a84d25b13274e"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9a2b5915c333e4364367140443b59f09feae42184459b913f0f41b9fed55794a"}, - {file = "lxml-4.9.4-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d82411dbf4d3127b6cde7da0f9373e37ad3a43e89ef374965465928f01c2b979"}, - {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:273473d34462ae6e97c0f4e517bd1bf9588aa67a1d47d93f760a1282640e24ac"}, - {file = "lxml-4.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:389d2b2e543b27962990ab529ac6720c3dded588cc6d0f6557eec153305a3622"}, - {file = "lxml-4.9.4-cp311-cp311-win32.whl", hash = "sha256:8aecb5a7f6f7f8fe9cac0bcadd39efaca8bbf8d1bf242e9f175cbe4c925116c3"}, - {file = "lxml-4.9.4-cp311-cp311-win_amd64.whl", hash = 
"sha256:c7721a3ef41591341388bb2265395ce522aba52f969d33dacd822da8f018aff8"}, - {file = "lxml-4.9.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:dbcb2dc07308453db428a95a4d03259bd8caea97d7f0776842299f2d00c72fc8"}, - {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:01bf1df1db327e748dcb152d17389cf6d0a8c5d533ef9bab781e9d5037619229"}, - {file = "lxml-4.9.4-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e8f9f93a23634cfafbad6e46ad7d09e0f4a25a2400e4a64b1b7b7c0fbaa06d9d"}, - {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3f3f00a9061605725df1816f5713d10cd94636347ed651abdbc75828df302b20"}, - {file = "lxml-4.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:953dd5481bd6252bd480d6ec431f61d7d87fdcbbb71b0d2bdcfc6ae00bb6fb10"}, - {file = "lxml-4.9.4-cp312-cp312-win32.whl", hash = "sha256:266f655d1baff9c47b52f529b5f6bec33f66042f65f7c56adde3fcf2ed62ae8b"}, - {file = "lxml-4.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:f1faee2a831fe249e1bae9cbc68d3cd8a30f7e37851deee4d7962b17c410dd56"}, - {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23d891e5bdc12e2e506e7d225d6aa929e0a0368c9916c1fddefab88166e98b20"}, - {file = "lxml-4.9.4-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e96a1788f24d03e8d61679f9881a883ecdf9c445a38f9ae3f3f193ab6c591c66"}, - {file = "lxml-4.9.4-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:5557461f83bb7cc718bc9ee1f7156d50e31747e5b38d79cf40f79ab1447afd2d"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:fdb325b7fba1e2c40b9b1db407f85642e32404131c08480dd652110fc908561b"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d74d4a3c4b8f7a1f676cedf8e84bcc57705a6d7925e6daef7a1e54ae543a197"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:ac7674d1638df129d9cb4503d20ffc3922bd463c865ef3cb412f2c926108e9a4"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:ddd92e18b783aeb86ad2132d84a4b795fc5ec612e3545c1b687e7747e66e2b53"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bd9ac6e44f2db368ef8986f3989a4cad3de4cd55dbdda536e253000c801bcc7"}, - {file = "lxml-4.9.4-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bc354b1393dce46026ab13075f77b30e40b61b1a53e852e99d3cc5dd1af4bc85"}, - {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f836f39678cb47c9541f04d8ed4545719dc31ad850bf1832d6b4171e30d65d23"}, - {file = "lxml-4.9.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:9c131447768ed7bc05a02553d939e7f0e807e533441901dd504e217b76307745"}, - {file = "lxml-4.9.4-cp36-cp36m-win32.whl", hash = "sha256:bafa65e3acae612a7799ada439bd202403414ebe23f52e5b17f6ffc2eb98c2be"}, - {file = "lxml-4.9.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6197c3f3c0b960ad033b9b7d611db11285bb461fc6b802c1dd50d04ad715c225"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:7b378847a09d6bd46047f5f3599cdc64fcb4cc5a5a2dd0a2af610361fbe77b16"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:1343df4e2e6e51182aad12162b23b0a4b3fd77f17527a78c53f0f23573663545"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6dbdacf5752fbd78ccdb434698230c4f0f95df7dd956d5f205b5ed6911a1367c"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:506becdf2ecaebaf7f7995f776394fcc8bd8a78022772de66677c84fb02dd33d"}, - {file = "lxml-4.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca8e44b5ba3edb682ea4e6185b49661fc22b230cf811b9c13963c9f982d1d964"}, - {file = 
"lxml-4.9.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9d9d5726474cbbef279fd709008f91a49c4f758bec9c062dfbba88eab00e3ff9"}, - {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bbdd69e20fe2943b51e2841fc1e6a3c1de460d630f65bde12452d8c97209464d"}, - {file = "lxml-4.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8671622256a0859f5089cbe0ce4693c2af407bc053dcc99aadff7f5310b4aa02"}, - {file = "lxml-4.9.4-cp37-cp37m-win32.whl", hash = "sha256:dd4fda67f5faaef4f9ee5383435048ee3e11ad996901225ad7615bc92245bc8e"}, - {file = "lxml-4.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6bee9c2e501d835f91460b2c904bc359f8433e96799f5c2ff20feebd9bb1e590"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:1f10f250430a4caf84115b1e0f23f3615566ca2369d1962f82bef40dd99cd81a"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:3b505f2bbff50d261176e67be24e8909e54b5d9d08b12d4946344066d66b3e43"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1449f9451cd53e0fd0a7ec2ff5ede4686add13ac7a7bfa6988ff6d75cff3ebe2"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4ece9cca4cd1c8ba889bfa67eae7f21d0d1a2e715b4d5045395113361e8c533d"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59bb5979f9941c61e907ee571732219fa4774d5a18f3fa5ff2df963f5dfaa6bc"}, - {file = "lxml-4.9.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b1980dbcaad634fe78e710c8587383e6e3f61dbe146bcbfd13a9c8ab2d7b1192"}, - {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9ae6c3363261021144121427b1552b29e7b59de9d6a75bf51e03bc072efb3c37"}, - {file = "lxml-4.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bcee502c649fa6351b44bb014b98c09cb00982a475a1912a9881ca28ab4f9cd9"}, - 
{file = "lxml-4.9.4-cp38-cp38-win32.whl", hash = "sha256:a8edae5253efa75c2fc79a90068fe540b197d1c7ab5803b800fccfe240eed33c"}, - {file = "lxml-4.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:701847a7aaefef121c5c0d855b2affa5f9bd45196ef00266724a80e439220e46"}, - {file = "lxml-4.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:f610d980e3fccf4394ab3806de6065682982f3d27c12d4ce3ee46a8183d64a6a"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:aa9b5abd07f71b081a33115d9758ef6077924082055005808f68feccb27616bd"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:365005e8b0718ea6d64b374423e870648ab47c3a905356ab6e5a5ff03962b9a9"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:16b9ec51cc2feab009e800f2c6327338d6ee4e752c76e95a35c4465e80390ccd"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a905affe76f1802edcac554e3ccf68188bea16546071d7583fb1b693f9cf756b"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd814847901df6e8de13ce69b84c31fc9b3fb591224d6762d0b256d510cbf382"}, - {file = "lxml-4.9.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91bbf398ac8bb7d65a5a52127407c05f75a18d7015a270fdd94bbcb04e65d573"}, - {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f99768232f036b4776ce419d3244a04fe83784bce871b16d2c2e984c7fcea847"}, - {file = "lxml-4.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bb5bd6212eb0edfd1e8f254585290ea1dadc3687dd8fd5e2fd9a87c31915cdab"}, - {file = "lxml-4.9.4-cp39-cp39-win32.whl", hash = "sha256:88f7c383071981c74ec1998ba9b437659e4fd02a3c4a4d3efc16774eb108d0ec"}, - {file = "lxml-4.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:936e8880cc00f839aa4173f94466a8406a96ddce814651075f95837316369899"}, - {file = 
"lxml-4.9.4-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:f6c35b2f87c004270fa2e703b872fcc984d714d430b305145c39d53074e1ffe0"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:606d445feeb0856c2b424405236a01c71af7c97e5fe42fbc778634faef2b47e4"}, - {file = "lxml-4.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a1bdcbebd4e13446a14de4dd1825f1e778e099f17f79718b4aeaf2403624b0f7"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0a08c89b23117049ba171bf51d2f9c5f3abf507d65d016d6e0fa2f37e18c0fc5"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:232fd30903d3123be4c435fb5159938c6225ee8607b635a4d3fca847003134ba"}, - {file = "lxml-4.9.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:231142459d32779b209aa4b4d460b175cadd604fed856f25c1571a9d78114771"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:520486f27f1d4ce9654154b4494cf9307b495527f3a2908ad4cb48e4f7ed7ef7"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:562778586949be7e0d7435fcb24aca4810913771f845d99145a6cee64d5b67ca"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a9e7c6d89c77bb2770c9491d988f26a4b161d05c8ca58f63fb1f1b6b9a74be45"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:786d6b57026e7e04d184313c1359ac3d68002c33e4b1042ca58c362f1d09ff58"}, - {file = "lxml-4.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95ae6c5a196e2f239150aa4a479967351df7f44800c93e5a975ec726fef005e2"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:9b556596c49fa1232b0fff4b0e69b9d4083a502e60e404b44341e2f8fb7187f5"}, - {file = 
"lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:cc02c06e9e320869d7d1bd323df6dd4281e78ac2e7f8526835d3d48c69060683"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:857d6565f9aa3464764c2cb6a2e3c2e75e1970e877c188f4aeae45954a314e0c"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c42ae7e010d7d6bc51875d768110c10e8a59494855c3d4c348b068f5fb81fdcd"}, - {file = "lxml-4.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f10250bb190fb0742e3e1958dd5c100524c2cc5096c67c8da51233f7448dc137"}, - {file = "lxml-4.9.4.tar.gz", hash = "sha256:b1541e50b78e15fa06a2670157a1962ef06591d4c998b998047fff5e3236880e"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, + {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, + {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, + {file = 
"lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, + {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, + {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, + {file = 
"lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, + {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = "sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, + {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, + {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, + {file 
= "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, + {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, + {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, + {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = 
"sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, + {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, + {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, + {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, + {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash 
= "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, + {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", 
hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, + {file = "lxml-5.2.1.tar.gz", hash = 
"sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (==0.29.37)"] +source = ["Cython (>=3.0.10)"] [[package]] name = "mako" @@ -3955,6 +4602,21 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -4303,7 +4965,6 @@ files = [ {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8-py3-none-any.whl", hash = "sha256:24f727df1e20b9876fa6e95f840a2a2651e34c0ad147676356f4bf5fbb0206ca"}, {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] @@ -4423,27 +5084,31 @@ files = [ [[package]] name = "multiprocess" -version = "0.70.16" +version = "0.70.15" description = "better multiprocessing and multithreading in Python" optional = false -python-versions = ">=3.8" +python-versions = 
">=3.7" files = [ - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, - {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, - {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, - {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, - {file = "multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, - {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, - {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, - {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, - {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, - {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, + {file = "multiprocess-0.70.15-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa36c7ed16f508091438687fe9baa393a7a8e206731d321e443745e743a0d4e5"}, + {file = 
"multiprocess-0.70.15-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:20e024018c46d0d1602024c613007ac948f9754659e3853b0aa705e83f6931d8"}, + {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:e576062981c91f0fe8a463c3d52506e598dfc51320a8dd8d78b987dfca91c5db"}, + {file = "multiprocess-0.70.15-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e73f497e6696a0f5433ada2b3d599ae733b87a6e8b008e387c62ac9127add177"}, + {file = "multiprocess-0.70.15-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:73db2e7b32dcc7f9b0f075c2ffa45c90b6729d3f1805f27e88534c8d321a1be5"}, + {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:4271647bd8a49c28ecd6eb56a7fdbd3c212c45529ad5303b40b3c65fc6928e5f"}, + {file = "multiprocess-0.70.15-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cf981fb998d6ec3208cb14f0cf2e9e80216e834f5d51fd09ebc937c32b960902"}, + {file = "multiprocess-0.70.15-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:18f9f2c7063346d1617bd1684fdcae8d33380ae96b99427260f562e1a1228b67"}, + {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:0eac53214d664c49a34695e5824872db4006b1a465edd7459a251809c3773370"}, + {file = "multiprocess-0.70.15-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:1a51dd34096db47fb21fa2b839e615b051d51b97af9a67afbcdaa67186b44883"}, + {file = "multiprocess-0.70.15-py310-none-any.whl", hash = "sha256:7dd58e33235e83cf09d625e55cffd7b0f0eede7ee9223cdd666a87624f60c21a"}, + {file = "multiprocess-0.70.15-py311-none-any.whl", hash = "sha256:134f89053d82c9ed3b73edd3a2531eb791e602d4f4156fc92a79259590bd9670"}, + {file = "multiprocess-0.70.15-py37-none-any.whl", hash = "sha256:f7d4a1629bccb433114c3b4885f69eccc200994323c80f6feee73b0edc9199c5"}, + {file = "multiprocess-0.70.15-py38-none-any.whl", hash = "sha256:bee9afba476c91f9ebee7beeee0601face9eff67d822e893f9a893725fbd6316"}, + {file = 
"multiprocess-0.70.15-py39-none-any.whl", hash = "sha256:3e0953f5d52b4c76f1c973eaf8214554d146f2be5decb48e928e55c7a2d19338"}, + {file = "multiprocess-0.70.15.tar.gz", hash = "sha256:f20eed3036c0ef477b07a4177cf7c1ba520d9a2677870a4f47fe026f0cd6787e"}, ] [package.dependencies] -dill = ">=0.3.8" +dill = ">=0.3.7" [[package]] name = "mypy" @@ -4559,44 +5224,44 @@ twitter = ["twython"] [[package]] name = "numexpr" -version = "2.9.0" +version = "2.10.0" description = "Fast numerical expression evaluator for NumPy" optional = false python-versions = ">=3.9" files = [ - {file = "numexpr-2.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c52b4ac54514f5d4d8ead66768810cd5f77aa198e6064213d9b5c7b2e1c97c35"}, - {file = "numexpr-2.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50f57bc333f285e8c46b1ce61c6e94ec9bb74e4ea0d674d1c6c6f4a286f64fe4"}, - {file = "numexpr-2.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:943ba141f3884ffafa3fa1a3ebf3cdda9e9688a67a3c91986e6eae13dc073d43"}, - {file = "numexpr-2.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee48acd6339748a65c0e32403b802ebfadd9cb0e3b602ba5889896238eafdd61"}, - {file = "numexpr-2.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:972e29b5cecc21466c5b177e38568372ab66aab1f053ae04690a49cea09e747d"}, - {file = "numexpr-2.9.0-cp310-cp310-win32.whl", hash = "sha256:520e55d75bd99c76e376b6326e35ecf44c5ce2635a5caed72799a3885fc49173"}, - {file = "numexpr-2.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:5615497c3f34b637fda9b571f7774b6a82f2367cc1364b7a4573068dd1aabcaa"}, - {file = "numexpr-2.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bffcbc55dea5a5f5255e2586da08f00929998820e6592ee717273a08ad021eb3"}, - {file = "numexpr-2.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:374dc6ca54b2af813cb15c2b34e85092dfeac1f73d51ec358dd81876bd9adcec"}, - {file = "numexpr-2.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:549afc1622296cca3478a132c6e0fb5e55a19e08d32bc0d5a415434824a9c157"}, - {file = "numexpr-2.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c618a5895e34db0a364dcdb9960084c080f93f9d377c45b1ca9c394c24b4e77"}, - {file = "numexpr-2.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:37a7dd36fd79a2b69c3fd2bc2b51ac8270bebc69cc96e6d78f1148e147fcbfa8"}, - {file = "numexpr-2.9.0-cp311-cp311-win32.whl", hash = "sha256:00dab81d49239ea5423861ad627097b44d10d802df5f883d1b00f742139c3349"}, - {file = "numexpr-2.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:0e2574cafb18373774f351cac45ed23b5b360d9ecd1dbf3c12dac6d6eefefc87"}, - {file = "numexpr-2.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9761195526a228e05eba400b8c484c94bbabfea853b9ea35ab8fa1bf415331b1"}, - {file = "numexpr-2.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0f619e91034b346ea85a4e1856ff06011dcb7dce10a60eda75e74db90120f880"}, - {file = "numexpr-2.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2749bce1c48706d58894992634a43b8458c4ba9411191471c4565fa41e9979ec"}, - {file = "numexpr-2.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c31f621a625c7be602f92b027d90f2d3d60dcbc19b106e77fb04a4362152af"}, - {file = "numexpr-2.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b937861d13de67d440d54c85a835faed7572be5a6fd10d4f3bd4e66e157f"}, - {file = "numexpr-2.9.0-cp312-cp312-win32.whl", hash = "sha256:aa6298fb46bd7ec69911b5b80927a00663d066e719b29f48eb952d559bdd8371"}, - {file = "numexpr-2.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:8efd879839572bde5a38a1aa3ac23fd4dd9b956fb969bc5e43d1c403419e1e8c"}, - {file = "numexpr-2.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b04f12a6130094a251e3a8fff40130589c1c83be6d4eb223873bea14d8c8b630"}, - {file = "numexpr-2.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:977537f2a1cc843f888fb5f0507626f956ada674e4b3847168214a3f3c7446fa"}, - {file = 
"numexpr-2.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eae6c0c2d5682c02e8ac9c4287c2232c2443c9148b239df22500eaa3c5d73b7"}, - {file = "numexpr-2.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fae6828042b70c2f52a132bfcb9139da704274ed11b982fbf537f91c075d2ef"}, - {file = "numexpr-2.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c77392aea53f0700d60eb270ad63174b4ff10b04f8de92861101ca2129fee51"}, - {file = "numexpr-2.9.0-cp39-cp39-win32.whl", hash = "sha256:3b03a6cf37a72f5b52f2b962d7ac7f565bea8eaba83c3c4e5fcf8fbb6a938153"}, - {file = "numexpr-2.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:d655b6eacc4e81006b662cba014e4615a9ddd96881b8b4db4ad0d7f6d38069af"}, - {file = "numexpr-2.9.0.tar.gz", hash = "sha256:f21d12f6c432ce349089eb95342babf6629aebb3fddf187a4492d3aadaadaaf0"}, + {file = "numexpr-2.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1af6dc6b3bd2e11a802337b352bf58f30df0b70be16c4f863b70a3af3a8ef95e"}, + {file = "numexpr-2.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3c66dc0188358cdcc9465b6ee54fd5eef2e83ac64b1d4ba9117c41df59bf6fca"}, + {file = "numexpr-2.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83f1e7a7f7ee741b8dcd20c56c3f862a3a3ec26fa8b9fcadb7dcd819876d2f35"}, + {file = "numexpr-2.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f0b045e1831953a47cc9fabae76a6794c69cbb60921751a5cf2d555034c55bf"}, + {file = "numexpr-2.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1d8eb88b0ae3d3c609d732a17e71096779b2bf47b3a084320ffa93d9f9132786"}, + {file = "numexpr-2.10.0-cp310-cp310-win32.whl", hash = "sha256:629b66cc1b750671e7fb396506b3f9410612e5bd8bc1dd55b5a0a0041d839f95"}, + {file = "numexpr-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:78e0a8bc4417c3dedcbae3c473505b69080535246edc977c7dccf3ec8454a685"}, + {file = "numexpr-2.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:a602692cd52ce923ce8a0a90fb1d6cf186ebe8706eed83eee0de685e634b9aa9"}, + {file = "numexpr-2.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:745b46a1fb76920a3eebfaf26e50bc94a9c13b5aee34b256ab4b2d792dbaa9ca"}, + {file = "numexpr-2.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10789450032357afaeda4ac4d06da9542d1535c13151e8d32b49ae1a488d1358"}, + {file = "numexpr-2.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4feafc65ea3044b8bf8f305b757a928e59167a310630c22b97a57dff07a56490"}, + {file = "numexpr-2.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:937d36c6d3cf15601f26f84f0f706649f976491e9e0892d16cd7c876d77fa7dc"}, + {file = "numexpr-2.10.0-cp311-cp311-win32.whl", hash = "sha256:03d0ba492e484a5a1aeb24b300c4213ed168f2c246177be5733abb4e18cbb043"}, + {file = "numexpr-2.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:6b5f8242c075477156d26b3a6b8e0cd0a06d4c8eb68d907bde56dd3c9c683e92"}, + {file = "numexpr-2.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b276e2ba3e87ace9a30fd49078ad5dcdc6a1674d030b1ec132599c55465c0346"}, + {file = "numexpr-2.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb5e12787101f1216f2cdabedc3417748f2e1f472442e16bbfabf0bab2336300"}, + {file = "numexpr-2.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05278bad96b5846d712eba58b44e5cec743bdb3e19ca624916c921d049fdbcf6"}, + {file = "numexpr-2.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6cdf9e64c5b3dbb61729edb505ea75ee212fa02b85c5b1d851331381ae3b0e1"}, + {file = "numexpr-2.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e3a973265591b0a875fd1151c4549e468959c7192821aac0bb86937694a08efa"}, + {file = "numexpr-2.10.0-cp312-cp312-win32.whl", hash = "sha256:416e0e9f0fc4cced67767585e44cb6b301728bdb9edbb7c534a853222ec62cac"}, + {file = "numexpr-2.10.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:748e8d4cde22d9a5603165293fb293a4de1a4623513299416c64fdab557118c2"}, + {file = "numexpr-2.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc3506c30c03b082da2cadef43747d474e5170c1f58a6dcdf882b3dc88b1e849"}, + {file = "numexpr-2.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:efa63ecdc9fcaf582045639ddcf56e9bdc1f4d9a01729be528f62df4db86c9d6"}, + {file = "numexpr-2.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a64d0dd8f8e694da3f8582d73d7da8446ff375f6dd239b546010efea371ac3"}, + {file = "numexpr-2.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d47bb567e330ebe86781864219a36cbccb3a47aec893bd509f0139c6b23e8104"}, + {file = "numexpr-2.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c7517b774d309b1f0896c89bdd1ddd33c4418a92ecfbe5e1df3ac698698f6fcf"}, + {file = "numexpr-2.10.0-cp39-cp39-win32.whl", hash = "sha256:04e8620e7e676504201d4082e7b3ee2d9b561d1cb9470b47a6104e10c1e2870e"}, + {file = "numexpr-2.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:56d0d96b130f7cd4d78d0017030d6a0e9d9fc2a717ac51d4cf4860b39637e86a"}, + {file = "numexpr-2.10.0.tar.gz", hash = "sha256:c89e930752639df040539160326d8f99a84159bbea41943ab8e960591edaaef0"}, ] [package.dependencies] -numpy = ">=1.13.3" +numpy = ">=1.19.3" [[package]] name = "numpy" @@ -4769,6 +5434,7 @@ description = "Nvidia JIT LTO Library" optional = true python-versions = ">=3" files = [ + {file = "nvidia_nvjitlink_cu12-12.4.99-py3-none-manylinux2014_aarch64.whl", hash = "sha256:75d6498c96d9adb9435f2bbdbddb479805ddfb97b5c1b32395c694185c20ca57"}, {file = "nvidia_nvjitlink_cu12-12.4.99-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c6428836d20fe7e327191c175791d38570e10762edc588fb46749217cd444c74"}, {file = "nvidia_nvjitlink_cu12-12.4.99-py3-none-win_amd64.whl", hash = "sha256:991905ffa2144cb603d8ca7962d75c35334ae82bf92820b6ba78157277da1ad2"}, ] @@ -4880,13 +5546,13 @@ sympy = "*" [[package]] name = "openai" -version = "1.14.3" +version = 
"1.16.1" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.14.3-py3-none-any.whl", hash = "sha256:7a465994a7ccf677a110c6cc2ef9d86229bad42c060b585b67049aa749f3b774"}, - {file = "openai-1.14.3.tar.gz", hash = "sha256:37b514e9c0ff45383ec9b242abd0f7859b1080d4b54b61393ed341ecad1b8eb9"}, + {file = "openai-1.16.1-py3-none-any.whl", hash = "sha256:77ef3db6110071f7154859e234250fb945a36554207a30a4491092eadb73fcb5"}, + {file = "openai-1.16.1.tar.gz", hash = "sha256:58922c785d167458b46e3c76e7b1bc2306f313ee9b71791e84cbf590abe160f2"}, ] [package.dependencies] @@ -4953,6 +5619,26 @@ opentelemetry-sdk = ">=1.24.0,<1.25.0" [package.extras] test = ["pytest-grpc"] +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.24.0" +description = "OpenTelemetry Collector Protobuf over HTTP Exporter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_http-1.24.0-py3-none-any.whl", hash = "sha256:25af10e46fdf4cd3833175e42f4879a1255fc01655fe14c876183a2903949836"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.24.0.tar.gz", hash = "sha256:704c066cc96f5131881b75c0eac286cd73fc735c490b054838b4513254bd7850"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.24.0" +opentelemetry-proto = "1.24.0" +opentelemetry-sdk = ">=1.24.0,<1.25.0" +requests = ">=2.7,<3.0" + [[package]] name = "opentelemetry-instrumentation" version = "0.45b0" @@ -5063,6 +5749,33 @@ files = [ {file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"}, ] +[[package]] +name = "optuna" +version = "3.6.1" +description = "A hyperparameter optimization framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "optuna-3.6.1-py3-none-any.whl", hash = 
"sha256:b32e0490bd6552790b70ec94de77dd2855057c9e229cd9f4da48fe8a31c7f1cc"}, + {file = "optuna-3.6.1.tar.gz", hash = "sha256:146e530b57b4b9afd7526b3e642fbe65491f7e292b405913355f8e438e361ecf"}, +] + +[package.dependencies] +alembic = ">=1.5.0" +colorlog = "*" +numpy = "*" +packaging = ">=20.0" +PyYAML = "*" +sqlalchemy = ">=1.3.0" +tqdm = "*" + +[package.extras] +benchmark = ["asv (>=0.5.0)", "botorch", "cma", "virtualenv"] +checking = ["black", "blackdoc", "flake8", "isort", "mypy", "mypy-boto3-s3", "types-PyYAML", "types-redis", "types-setuptools", "types-tqdm", "typing-extensions (>=3.10.0.0)"] +document = ["ase", "cmaes (>=0.10.0)", "fvcore", "lightgbm", "matplotlib (!=3.6.0)", "pandas", "pillow", "plotly (>=4.9.0)", "scikit-learn", "sphinx", "sphinx-copybutton", "sphinx-gallery", "sphinx-plotly-directive", "sphinx-rtd-theme (>=1.2.0)", "torch", "torchvision"] +optional = ["boto3", "cmaes (>=0.10.0)", "google-cloud-storage", "matplotlib (!=3.6.0)", "pandas", "plotly (>=4.9.0)", "redis", "scikit-learn (>=0.24.2)", "scipy", "torch"] +test = ["coverage", "fakeredis[lua]", "kaleido", "moto", "pytest", "scipy (>=1.9.2)", "torch"] + [[package]] name = "ordered-set" version = "4.1.0" @@ -5320,79 +6033,80 @@ numpy = "*" [[package]] name = "pillow" -version = "10.2.0" +version = "10.3.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, - {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, - {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, - {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, - {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, - {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, - {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, - {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, - {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, 
- {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, - {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, - {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, - {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, - {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, - {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, - {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, - {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, - {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = 
"sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, - {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, - {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, - {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, - {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, - {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, - {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, - {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, - {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, - {file = 
"pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, - {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, - {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, - {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, - {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, - {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, - {file = 
"pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, - {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, - {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, - {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, - {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = 
"pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = 
"pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = 
"pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = 
"pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, ] [package.extras] @@ -5405,13 +6119,13 @@ xmp = ["defusedxml"] [[package]] name = "pinecone-client" -version = "3.2.1" +version = "3.2.2" description = "Pinecone client and SDK" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "pinecone_client-3.2.1-py3-none-any.whl", hash = "sha256:e3e7983762509235250b9bcd543ec6283b7dffaed2e899f1631327f2b77859e3"}, - {file = "pinecone_client-3.2.1.tar.gz", hash = "sha256:8560ffafb13b9c45a92eb9eb77a2db32d5a1fa7903a1db17f7af58ee1058bb60"}, + {file = "pinecone_client-3.2.2-py3-none-any.whl", hash = "sha256:7e492fdda23c73726bc0cb94c689bb950d06fb94e82b701a0c610c2e830db327"}, + {file = "pinecone_client-3.2.2.tar.gz", hash = "sha256:887a12405f90ac11c396490f605fc479f31cf282361034d1ae0fccc02ac75bee"}, ] [package.dependencies] @@ -5914,6 +6628,17 @@ files = [ [package.dependencies] numpy = ">=1.16.6" +[[package]] +name = "pyarrow-hotfix" +version = "0.6" +description = "" +optional = false +python-versions = ">=3.5" +files = [ + {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"}, + {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"}, +] + [[package]] name = "pyasn1" version = "0.6.0" @@ -5978,13 +6703,13 @@ websurfer = ["beautifulsoup4", "markdownify", "pathvalidate", "pdfminer.six"] [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = 
"pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] @@ -6030,18 +6755,18 @@ files = [ [[package]] name = "pydantic" -version = "2.6.4" +version = "2.5.0" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, - {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, + {file = "pydantic-2.5.0-py3-none-any.whl", hash = "sha256:7ce6e766c456ad026fe5712f7bcf036efc34bd5d107b3e669ef7ea01b3a9050c"}, + {file = "pydantic-2.5.0.tar.gz", hash = "sha256:69bd6fb62d2d04b7055f59a396993486a2ee586c43a0b89231ce0000de07627c"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.3" +pydantic-core = "2.14.1" typing-extensions = ">=4.6.1" [package.extras] @@ -6049,90 +6774,112 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.3" +version = "2.14.1" description = "" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, - {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, - {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, - {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, - {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, - {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, - {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, - {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, - {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, - {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, - {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, - {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, - {file = 
"pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, - {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, - {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, - {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, - {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, - {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, - {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, - {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, - {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, - {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, - {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, - {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, - 
{file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, - {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, - {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, - {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, - {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, - {file = 
"pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, - {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, - {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, - {file 
= "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, - {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, + {file = "pydantic_core-2.14.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:812beca1dcb2b722cccc7e9c620bd972cbc323321194ec2725eab3222e6ac573"}, + {file = "pydantic_core-2.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2ccdc53cb88e51c7d47d74c59630d7be844428f6b8d463055ffad6f0392d8da"}, + {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd937733bf2fe7d6a8bf208c12741f1f730b7bf5636033877767a75093c29b8a"}, + {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:581bb606a31749a00796f5257947a0968182d7fe91e1dada41f06aeb6bfbc91a"}, + {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aadf74a40a7ae49c3c1aa7d32334fe94f4f968e21dd948e301bb4ed431fb2412"}, + {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b89821a2c77cc1b8f2c1fc3aacd6a3ecc5df8f7e518dc3f18aef8c4dcf66003d"}, + {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49ee28d65f506b2858a60745cc974ed005298ebab12693646b97641dd7c99c35"}, + {file = "pydantic_core-2.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97246f896b4df7fd84caa8a75a67abb95f94bc0b547665bf0889e3262b060399"}, + {file = "pydantic_core-2.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1185548665bc61bbab0dc78f10c8eafa0db0aa1e920fe9a451b77782b10a65cc"}, + {file = "pydantic_core-2.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2a7d08b39fac97540fba785fce3b21ee01a81f081a07a4d031efd791da6666f9"}, + {file = "pydantic_core-2.14.1-cp310-none-win32.whl", hash = 
"sha256:0a8c8daf4e3aa3aeb98e3638fc3d58a359738f3d12590b2474c6bb64031a0764"}, + {file = "pydantic_core-2.14.1-cp310-none-win_amd64.whl", hash = "sha256:4f0788699a92d604f348e9c1ac5e97e304e97127ba8325c7d0af88dcc7d35bd3"}, + {file = "pydantic_core-2.14.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:2be018a84995b6be1bbd40d6064395dbf71592a981169cf154c0885637f5f54a"}, + {file = "pydantic_core-2.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fc3227408808ba7df8e95eb1d8389f4ba2203bed8240b308de1d7ae66d828f24"}, + {file = "pydantic_core-2.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42d5d0e9bbb50481a049bd0203224b339d4db04006b78564df2b782e2fd16ebc"}, + {file = "pydantic_core-2.14.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc6a4ea9f88a810cb65ccae14404da846e2a02dd5c0ad21dee712ff69d142638"}, + {file = "pydantic_core-2.14.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d312ad20e3c6d179cb97c42232b53111bcd8dcdd5c1136083db9d6bdd489bc73"}, + {file = "pydantic_core-2.14.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:679cc4e184f213c8227862e57340d12fd4d4d19dc0e3ddb0f653f86f01e90f94"}, + {file = "pydantic_core-2.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101df420e954966868b8bc992aefed5fa71dd1f2755104da62ee247abab28e2f"}, + {file = "pydantic_core-2.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c964c0cc443d6c08a2347c0e5c1fc2d85a272dc66c1a6f3cde4fc4843882ada4"}, + {file = "pydantic_core-2.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8276bbab68a9dbe721da92d19cbc061f76655248fe24fb63969d0c3e0e5755e7"}, + {file = "pydantic_core-2.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:12163197fec7c95751a3c71b36dcc1909eed9959f011ffc79cc8170a6a74c826"}, + {file = "pydantic_core-2.14.1-cp311-none-win32.whl", hash = 
"sha256:b8ff0302518dcd001bd722bbe342919c29e5066c7eda86828fe08cdc112668b8"}, + {file = "pydantic_core-2.14.1-cp311-none-win_amd64.whl", hash = "sha256:59fa83873223f856d898452c6162a390af4297756f6ba38493a67533387d85d9"}, + {file = "pydantic_core-2.14.1-cp311-none-win_arm64.whl", hash = "sha256:798590d38c9381f07c48d13af1f1ef337cebf76ee452fcec5deb04aceced51c7"}, + {file = "pydantic_core-2.14.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:587d75aec9ae50d0d63788cec38bf13c5128b3fc1411aa4b9398ebac884ab179"}, + {file = "pydantic_core-2.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26242e3593d4929123615bd9365dd86ef79b7b0592d64a96cd11fd83c69c9f34"}, + {file = "pydantic_core-2.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5879ac4791508d8f0eb7dec71ff8521855180688dac0c55f8c99fc4d1a939845"}, + {file = "pydantic_core-2.14.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad9ea86f5fc50f1b62c31184767fe0cacaa13b54fe57d38898c3776d30602411"}, + {file = "pydantic_core-2.14.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:102ac85a775e77821943ae38da9634ddd774b37a8d407181b4f7b05cdfb36b55"}, + {file = "pydantic_core-2.14.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2459cc06572730e079ec1e694e8f68c99d977b40d98748ae72ff11ef21a56b0b"}, + {file = "pydantic_core-2.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:217dcbfaf429a9b8f1d54eb380908b9c778e78f31378283b30ba463c21e89d5d"}, + {file = "pydantic_core-2.14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d59e0d7cdfe8ed1d4fcd28aad09625c715dc18976c7067e37d8a11b06f4be3e"}, + {file = "pydantic_core-2.14.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e2be646a5155d408e68b560c0553e8a83dc7b9f90ec6e5a2fc3ff216719385db"}, + {file = "pydantic_core-2.14.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:ffba979801e3931a19cd30ed2049450820effe8f152aaa317e2fd93795d318d7"}, + {file = "pydantic_core-2.14.1-cp312-none-win32.whl", hash = "sha256:132b40e479cb5cebbbb681f77aaceabbc8355df16c9124cff1d4060ada83cde2"}, + {file = "pydantic_core-2.14.1-cp312-none-win_amd64.whl", hash = "sha256:744b807fe2733b6da3b53e8ad93e8b3ea3ee3dfc3abece4dd2824cc1f39aa343"}, + {file = "pydantic_core-2.14.1-cp312-none-win_arm64.whl", hash = "sha256:24ba48f9d0b8d64fc5e42e1600366c3d7db701201294989aebdaca23110c02ab"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:ba55d73a2df4771b211d0bcdea8b79454980a81ed34a1d77a19ddcc81f98c895"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e905014815687d88cbb14bbc0496420526cf20d49f20606537d87646b70f1046"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:443dc5eede7fa76b2370213e0abe881eb17c96f7d694501853c11d5d56916602"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abae6fd5504e5e438e4f6f739f8364fd9ff5a5cdca897e68363e2318af90bc28"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9486e27bb3f137f33e2315be2baa0b0b983dae9e2f5f5395240178ad8e644728"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69df82892ff00491d673b1929538efb8c8d68f534fdc6cb7fd3ac8a5852b9034"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:184ff7b30c3f60e1b775378c060099285fd4b5249271046c9005f8b247b39377"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3d5b2a4b3c10cad0615670cab99059441ff42e92cf793a0336f4bc611e895204"}, + {file = "pydantic_core-2.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:871c641a83719caaa856a11dcc61c5e5b35b0db888e1a0d338fe67ce744575e2"}, + 
{file = "pydantic_core-2.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1e7208946ea9b27a8cef13822c339d4ae96e45952cc01fc4a91c7f1cb0ae2861"}, + {file = "pydantic_core-2.14.1-cp37-none-win32.whl", hash = "sha256:b4ff385a525017f5adf6066d7f9fb309f99ade725dcf17ed623dc7dce1f85d9f"}, + {file = "pydantic_core-2.14.1-cp37-none-win_amd64.whl", hash = "sha256:c7411cd06afeb263182e38c6ca5b4f5fe4f20d91466ad7db0cd6af453a02edec"}, + {file = "pydantic_core-2.14.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:2871daf5b2823bf77bf7d3d43825e5d904030c155affdf84b21a00a2e00821d2"}, + {file = "pydantic_core-2.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7977e261cac5f99873dc2c6f044315d09b19a71c4246560e1e67593889a90978"}, + {file = "pydantic_core-2.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5a111f9158555582deadd202a60bd7803b6c68f406391b7cf6905adf0af6811"}, + {file = "pydantic_core-2.14.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac417312bf6b7a0223ba73fb12e26b2854c93bf5b1911f7afef6d24c379b22aa"}, + {file = "pydantic_core-2.14.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c36987f5eb2a7856b5f5feacc3be206b4d1852a6ce799f6799dd9ffb0cba56ae"}, + {file = "pydantic_core-2.14.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6e98227eb02623d57e1fd061788837834b68bb995a869565211b9abf3de4bf4"}, + {file = "pydantic_core-2.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023b6d7ec4e97890b28eb2ee24413e69a6d48de4e8b75123957edd5432f4eeb3"}, + {file = "pydantic_core-2.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6015beb28deb5306049ecf2519a59627e9e050892927850a884df6d5672f8c7d"}, + {file = "pydantic_core-2.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3f48d4afd973abbd65266ac24b24de1591116880efc7729caf6b6b94a9654c9e"}, + {file = "pydantic_core-2.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:28734bcfb8fc5b03293dec5eb5ea73b32ff767f6ef79a31f6e41dad2f5470270"}, + {file = "pydantic_core-2.14.1-cp38-none-win32.whl", hash = "sha256:3303113fdfaca927ef11e0c5f109e2ec196c404f9d7ba5f8ddb63cdf287ea159"}, + {file = "pydantic_core-2.14.1-cp38-none-win_amd64.whl", hash = "sha256:144f2c1d5579108b6ed1193fcc9926124bd4142b0f7020a7744980d1235c8a40"}, + {file = "pydantic_core-2.14.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:893bf4fb9bfb9c4639bc12f3de323325ada4c6d60e478d5cded65453e9364890"}, + {file = "pydantic_core-2.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:052d8731aaf844f91fe4cd3faf28983b109a5865b3a256ec550b80a5689ead87"}, + {file = "pydantic_core-2.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb1c6ecb53e4b907ee8486f453dd940b8cbb509946e2b671e3bf807d310a96fc"}, + {file = "pydantic_core-2.14.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:94cf6d0274eb899d39189144dcf52814c67f9b0fd196f211420d9aac793df2da"}, + {file = "pydantic_core-2.14.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36c3bf96f803e207a80dbcb633d82b98ff02a9faa76dd446e969424dec8e2b9f"}, + {file = "pydantic_core-2.14.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb290491f1f0786a7da4585250f1feee200fc17ff64855bdd7c42fb54526fa29"}, + {file = "pydantic_core-2.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6590ed9d13eb51b28ea17ddcc6c8dbd6050b4eb589d497105f0e13339f223b72"}, + {file = "pydantic_core-2.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:69cd74e55a5326d920e7b46daa2d81c2bdb8bcf588eafb2330d981297b742ddc"}, + {file = "pydantic_core-2.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d965bdb50725a805b083f5f58d05669a85705f50a6a864e31b545c589290ee31"}, + {file = "pydantic_core-2.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca942a2dc066ca5e04c27feaa8dfb9d353ddad14c6641660c565149186095343"}, + 
{file = "pydantic_core-2.14.1-cp39-none-win32.whl", hash = "sha256:72c2ef3787c3b577e5d6225d73a77167b942d12cef3c1fbd5e74e55b7f881c36"}, + {file = "pydantic_core-2.14.1-cp39-none-win_amd64.whl", hash = "sha256:55713d155da1e508083c4b08d0b1ad2c3054f68b8ef7eb3d3864822e456f0bb5"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:53efe03cc383a83660cfdda6a3cb40ee31372cedea0fde0b2a2e55e838873ab6"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f523e116879bc6714e61d447ce934676473b068069dce6563ea040381dc7a257"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85bb66d661be51b2cba9ca06759264b3469d2dbb53c3e6effb3f05fec6322be6"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f53a3ccdc30234cb4342cec541e3e6ed87799c7ca552f0b5f44e3967a5fed526"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1bfb63821ada76719ffcd703fc40dd57962e0d8c253e3c565252e6de6d3e0bc6"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e2c689439f262c29cf3fcd5364da1e64d8600facecf9eabea8643b8755d2f0de"}, + {file = "pydantic_core-2.14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a15f6e5588f7afb7f6fc4b0f4ff064749e515d34f34c666ed6e37933873d8ad8"}, + {file = "pydantic_core-2.14.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f1a30eef060e21af22c7d23349f1028de0611f522941c80efa51c05a63142c62"}, + {file = "pydantic_core-2.14.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16f4a7e1ec6b3ea98a1e108a2739710cd659d68b33fbbeaba066202cab69c7b6"}, + {file = "pydantic_core-2.14.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd80a2d383940eec3db6a5b59d1820f947317acc5c75482ff8d79bf700f8ad6a"}, + {file = 
"pydantic_core-2.14.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a68a36d71c7f638dda6c9e6b67f6aabf3fa1471b198d246457bfdc7c777cdeb7"}, + {file = "pydantic_core-2.14.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ebc79120e105e4bcd7865f369e3b9dbabb0d492d221e1a7f62a3e8e292550278"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c8c466facec2ccdf025b0b1455b18f2c3d574d5f64d24df905d3d7b8f05d5f4e"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b91b5ec423e88caa16777094c4b2b97f11453283e7a837e5e5e1b886abba1251"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:130e49aa0cb316f743bc7792c36aefa39fc2221312f1d4b333b19edbdd71f2b1"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f483467c046f549572f8aca3b7128829e09ae3a9fe933ea421f7cb7c58120edb"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dee4682bd7947afc682d342a8d65ad1834583132383f8e801601a8698cb8d17a"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:8d927d042c0ef04607ee7822828b208ab045867d20477ec6593d612156798547"}, + {file = "pydantic_core-2.14.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5a1570875eb0d1479fb2270ed80c88c231aaaf68b0c3f114f35e7fb610435e4f"}, + {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cb2fd3ab67558eb16aecfb4f2db4febb4d37dc74e6b8613dc2e7160fb58158a9"}, + {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7991f25b98038252363a03e6a9fe92e60fe390fda2631d238dc3b0e396632f8"}, + {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b45b7be9f99991405ecd6f6172fb6798908a8097106ae78d5cc5cc15121bad9"}, + {file = 
"pydantic_core-2.14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:51506e7652a2ef1d1cf763c4b51b972ff4568d1dddc96ca83931a6941f5e6389"}, + {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:66dc0e63349ec39c1ea66622aa5c2c1f84382112afd3ab2fa0cca4fb01f7db39"}, + {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:8e17f0c3ba4cb07faa0038a59ce162de584ed48ba645c8d05a5de1e40d4c21e7"}, + {file = "pydantic_core-2.14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d983222223f63e323a5f497f5b85e211557a5d8fb670dc88f343784502b466ba"}, + {file = "pydantic_core-2.14.1.tar.gz", hash = "sha256:0d82a6ee815388a362885186e431fac84c7a06623bc136f508e9f88261d8cadb"}, ] [package.dependencies] @@ -6275,6 +7022,64 @@ snappy = ["python-snappy"] test = ["pytest (>=7)"] zstd = ["zstandard"] +[[package]] +name = "pymupdf" +version = "1.24.1" +description = "A high performance Python library for data extraction, analysis, conversion & manipulation of PDF (and other) documents." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "PyMuPDF-1.24.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:6427aee313e24447f57edbfc7a28aa6bbca007fe0ad77603f54a371c6c510eeb"}, + {file = "PyMuPDF-1.24.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:12078c0bee337de969dbd6d89ef446312794d74db365cb9ac14902b863b35414"}, + {file = "PyMuPDF-1.24.1-cp310-none-manylinux2014_aarch64.whl", hash = "sha256:73f86eefd7f3878f112fa10791aa2e63934cf59a4c024dd54cd6fe94443c352c"}, + {file = "PyMuPDF-1.24.1-cp310-none-manylinux2014_x86_64.whl", hash = "sha256:caf6ceb1dbebe9f70bf7dd683cc91b896604a7c62873e5b50089f9e85e85c517"}, + {file = "PyMuPDF-1.24.1-cp310-none-win32.whl", hash = "sha256:468a8bb2b95828e0f6739fbfe509700cc0dac600f756d8cb6316316e1eba9689"}, + {file = "PyMuPDF-1.24.1-cp310-none-win_amd64.whl", hash = "sha256:e47504391908e2d721c743aed36196310a5e15355a85459c1c4ddcf8f2002fbe"}, + {file = "PyMuPDF-1.24.1-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:c54ff927257b432ffd39dc6a0a46bd1120e85d192100efca021f27d4b881cfd6"}, + {file = "PyMuPDF-1.24.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:6d412da9f9a73f66973eea4284776f292135906700a06c39122e862a1e3ccf58"}, + {file = "PyMuPDF-1.24.1-cp311-none-manylinux2014_aarch64.whl", hash = "sha256:95a54611abb7322f5b10b44cbf19b605ed172df2c4c7995ad78854bc8423dd9c"}, + {file = "PyMuPDF-1.24.1-cp311-none-manylinux2014_x86_64.whl", hash = "sha256:9a3b21c8fc274ff42855ca2da65961e2319b05b75ef9e2caf25c04f9083ec79c"}, + {file = "PyMuPDF-1.24.1-cp311-none-win32.whl", hash = "sha256:8a81106a8bc229823736487d2492fd3af724a94521a1cd9b67849dd04b9c31ed"}, + {file = "PyMuPDF-1.24.1-cp311-none-win_amd64.whl", hash = "sha256:de5b6c4db4a2a9f28937e79135f732827c424f7444c12767cc1081c8006f0430"}, + {file = "PyMuPDF-1.24.1-cp312-none-macosx_10_9_x86_64.whl", hash = "sha256:02a6586979df2ad958b524ba42955beaa67fd21661616a0ed04ac07db009474c"}, + {file = "PyMuPDF-1.24.1-cp312-none-macosx_11_0_arm64.whl", hash = 
"sha256:8eb292d16671166acdaa280e98cac4368298f32556f2de2ee690782a635df8ee"}, + {file = "PyMuPDF-1.24.1-cp312-none-manylinux2014_aarch64.whl", hash = "sha256:f7b7f2011fa522a57fb3d6a7a58bcdcf01ee59bdad536ef9eb5c3fdf1e04e6c3"}, + {file = "PyMuPDF-1.24.1-cp312-none-manylinux2014_x86_64.whl", hash = "sha256:6832f1d9332810760b587ad375eb84d64ec8d8f29395995b463cb5f30533a413"}, + {file = "PyMuPDF-1.24.1-cp312-none-win32.whl", hash = "sha256:f775bb56391629e81b5f870fc3dec0a0fb44cb34a92b4696b9207b31234711df"}, + {file = "PyMuPDF-1.24.1-cp312-none-win_amd64.whl", hash = "sha256:8489df092473d590fb14903433bd99a07dc3d2924f5a5c8ead615795f2d65a65"}, + {file = "PyMuPDF-1.24.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:ee9cfac470aeb6b5b7deb4f6472b7796c3132856849c635c8e56c7a371e40238"}, + {file = "PyMuPDF-1.24.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:825c62367b01e61b4bce0cc96d45b0ec336475422cfa36de6f441b4d3389a26e"}, + {file = "PyMuPDF-1.24.1-cp38-none-manylinux2014_aarch64.whl", hash = "sha256:73d07e127936948a29a7dbd4c831e9eb45a60b495d72e604d454fd040fd08c5f"}, + {file = "PyMuPDF-1.24.1-cp38-none-manylinux2014_x86_64.whl", hash = "sha256:d2b4f8956d0ca7564604491db8b29cd7872a2b4d65f1d7e16a1bccfecf84bb56"}, + {file = "PyMuPDF-1.24.1-cp38-none-win32.whl", hash = "sha256:7df966954ff0edbcd5d743c5f6fb68b3203e67534747e8753691b8ffedeaa518"}, + {file = "PyMuPDF-1.24.1-cp38-none-win_amd64.whl", hash = "sha256:6952d47f0f05cf9338470dda078e4533ddb876368b199ebfa2f9e6076311898b"}, + {file = "PyMuPDF-1.24.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:e3f7a101a14d742c93b660b7586ab4c1491caea9062a5de9c308578a7a4f8b69"}, + {file = "PyMuPDF-1.24.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:dbc5d67dfd07123293993eb93bee35d329fce0bc8134b9cd5514ef75c68ffee8"}, + {file = "PyMuPDF-1.24.1-cp39-none-manylinux2014_aarch64.whl", hash = "sha256:0edda1024ada67603e5888f31656048d3fd53167c8b0d56f435b986eb507df8f"}, + {file = "PyMuPDF-1.24.1-cp39-none-manylinux2014_x86_64.whl", hash = 
"sha256:38728bb6aab9e3879aa8ac4d337be8fe838d33973f43e3b7805b86265c24f349"}, + {file = "PyMuPDF-1.24.1-cp39-none-win32.whl", hash = "sha256:b8a5247d0cec87765481c38d2b8602f0264bf7ca6b5dc3013caf64ce46ad4d5e"}, + {file = "PyMuPDF-1.24.1-cp39-none-win_amd64.whl", hash = "sha256:d1078ea265635e962693d7298bd39be64af7d1dd2c6dc663a8562e75f547f948"}, + {file = "PyMuPDF-1.24.1.tar.gz", hash = "sha256:38e6101dab2ff86c4e2444fcec8a04377ae1d6db1bef0f7a1ddab3ac6abe4d41"}, +] + +[package.dependencies] +PyMuPDFb = "1.24.1" + +[[package]] +name = "pymupdfb" +version = "1.24.1" +description = "MuPDF shared libraries for PyMuPDF." +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyMuPDFb-1.24.1-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:37179e363bf69ce9be637937c5469957b96968341dabe3ce8f4b690a82e9ad92"}, + {file = "PyMuPDFb-1.24.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:17444ea7d6897c27759880ad76af537d19779f901de82ae9548598a70f614558"}, + {file = "PyMuPDFb-1.24.1-py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:490f7fff4dbe362bc895cefdfc5030d712311d024d357a1388d64816eb215d34"}, + {file = "PyMuPDFb-1.24.1-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0fbcc0d2a9ce79fa38eb4e8bb5c959b582f7a49938874e9f61d1a6f5eeb1e4b8"}, + {file = "PyMuPDFb-1.24.1-py3-none-win32.whl", hash = "sha256:ae67736058882cdd9459810a4aae9ac2b2e89ac2e916cb5fefb0f651c9739e9e"}, + {file = "PyMuPDFb-1.24.1-py3-none-win_amd64.whl", hash = "sha256:01c8b7f0ce9166310eb28c7aebcb8d5fe12a4bc082f9b00d580095eebeaf0af5"}, +] + [[package]] name = "pyparsing" version = "2.4.7" @@ -6297,9 +7102,6 @@ files = [ {file = "pypdf-4.1.0.tar.gz", hash = "sha256:01c3257ec908676efd60a4537e525b89d48e0852bc92b4e0aa4cc646feda17cc"}, ] -[package.dependencies] -typing_extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} - [package.extras] crypto = ["PyCryptodome", "cryptography"] dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", 
"pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] @@ -6498,19 +7300,53 @@ files = [ six = ">=1.5" [[package]] -name = "python-dotenv" -version = "0.21.1" -description = "Read key-value pairs from a .env file and set them as environment variables" +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." optional = false python-versions = ">=3.7" files = [ - {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, - {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, ] [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-engineio" +version = "4.9.0" +description = "Engine.IO server and client for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-engineio-4.9.0.tar.gz", hash = "sha256:e87459c15638e567711fd156e6f9c4a402668871bed79523f0ecfec744729ec7"}, + {file = "python_engineio-4.9.0-py3-none-any.whl", hash = "sha256:979859bff770725b75e60353d7ae53b397e8b517d05ba76733b404a3dcca3e4c"}, +] + +[package.dependencies] +simple-websocket 
= ">=0.10.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + [[package]] name = "python-iso639" version = "2024.2.7" @@ -6571,6 +7407,26 @@ files = [ [package.extras] dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] +[[package]] +name = "python-socketio" +version = "5.11.2" +description = "Socket.IO server and client for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-socketio-5.11.2.tar.gz", hash = "sha256:ae6a1de5c5209ca859dc574dccc8931c4be17ee003e74ce3b8d1306162bb4a37"}, + {file = "python_socketio-5.11.2-py3-none-any.whl", hash = "sha256:b9f22a8ff762d7a6e123d16a43ddb1a27d50f07c3c88ea999334f2f89b0ad52b"}, +] + +[package.dependencies] +bidict = ">=0.21.0" +python-engineio = ">=4.8.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + [[package]] name = "pytube" version = "15.0.0" @@ -6806,25 +7662,28 @@ fastembed = ["fastembed (==0.2.5)"] [[package]] name = "qianfan" -version = "0.3.0" +version = "0.3.5" description = "文心千帆大模型平台 Python SDK" optional = false python-versions = ">=3.7,<4" files = [ - {file = "qianfan-0.3.0-py3-none-any.whl", hash = "sha256:1fc324aa494f10c8f4569251d298c14e6700ea93f27cc361d8e5cce456b930a1"}, - {file = "qianfan-0.3.0.tar.gz", hash = "sha256:b12aab31b5eb6f58d5423b2d4648627add7941a6cfa2f7c36d7da268302ea72c"}, + {file = "qianfan-0.3.5-py3-none-any.whl", hash = "sha256:0d5712c93ec6877c4176aae21ff58b41ccfef7ba661c6e19c07209c353353a16"}, + {file = "qianfan-0.3.5.tar.gz", hash = "sha256:b71847888bd99d61cee5f84f614f431204f3d656d71dd7ae1d0f9bc9ae51b42b"}, ] [package.dependencies] aiohttp = ">=3.7.0" 
aiolimiter = ">=1.1.0" bce-python-sdk = ">=0.8.79" +clevercsv = {version = "*", markers = "python_version >= \"3.8\""} +ijson = "*" +multiprocess = "*" numpy = {version = ">=1.22.0", markers = "python_version >= \"3.8\""} prompt-toolkit = ">=3.0.38" pyarrow = {version = ">=14.0.1", markers = "python_version >= \"3.8\""} pydantic = "*" python-dateutil = ">=2.8.2,<3.0.0" -python-dotenv = "<=0.21.1" +python-dotenv = {version = ">=1.0", markers = "python_version >= \"3.8\""} pyyaml = ">=6.0.1,<7.0.0" requests = ">=2.24" rich = ">=13.0.0" @@ -6941,19 +7800,19 @@ full = ["numpy"] [[package]] name = "realtime" -version = "1.0.0" +version = "1.0.3" description = "" optional = false -python-versions = ">=3.8,<4.0" +python-versions = "<4.0,>=3.8" files = [ - {file = "realtime-1.0.0-py3-none-any.whl", hash = "sha256:ceab9e292211ab08b5792ac52b3fa25398440031d5b369bd5799b8125056e2d8"}, - {file = "realtime-1.0.0.tar.gz", hash = "sha256:14e540c4a0cc2736ae83e0cbd7efbbfb8b736df1681df2b9141556cb4848502d"}, + {file = "realtime-1.0.3-py3-none-any.whl", hash = "sha256:809b99a1c09390a4580ca2d37d84c85dffacb1804f80c6f5a4491d312c20e6e3"}, + {file = "realtime-1.0.3.tar.gz", hash = "sha256:1a39b5dcdb345b4cc7fd43bc035feb38ca915c9248962f20d264625bc8eb2c4e"}, ] [package.dependencies] python-dateutil = ">=2.8.1,<3.0.0" typing-extensions = ">=4.2.0,<5.0.0" -websockets = ">=10.3,<11.0" +websockets = ">=11,<13" [[package]] name = "red-black-tree-mod" @@ -7124,6 +7983,20 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "respx" +version = "0.20.2" +description = "A utility for mocking out the Python HTTPX and HTTP Core libraries." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "respx-0.20.2-py2.py3-none-any.whl", hash = "sha256:ab8e1cf6da28a5b2dd883ea617f8130f77f676736e6e9e4a25817ad116a172c9"}, + {file = "respx-0.20.2.tar.gz", hash = "sha256:07cf4108b1c88b82010f67d3c831dae33a375c7b436e54d87737c7f9f99be643"}, +] + +[package.dependencies] +httpx = ">=0.21.0" + [[package]] name = "rich" version = "13.7.1" @@ -7404,45 +8277,45 @@ tests = ["black (>=23.3.0)", "matplotlib (>=3.3.4)", "mypy (>=1.3)", "numpydoc ( [[package]] name = "scipy" -version = "1.12.0" +version = "1.13.0" description = "Fundamental algorithms for scientific computing in Python" optional = true python-versions = ">=3.9" files = [ - {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, - {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, - {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, - {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, - {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, - {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, - {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, - {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, - {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, - {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, - {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, - {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, - {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, - {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, - {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, - {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, - {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, - {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, - {file = "scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, 
+ {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d0de696f589681c2802f9090fff730c218f7c51ff49bf252b6a97ec4a5d19e8b"}, + {file = "scipy-1.13.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:b2a3ff461ec4756b7e8e42e1c681077349a038f0686132d623fa404c0bee2551"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf9fe63e7a4bf01d3645b13ff2aa6dea023d38993f42aaac81a18b1bda7a82a"}, + {file = "scipy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e7626dfd91cdea5714f343ce1176b6c4745155d234f1033584154f60ef1ff42"}, + {file = "scipy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:109d391d720fcebf2fbe008621952b08e52907cf4c8c7efc7376822151820820"}, + {file = "scipy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:8930ae3ea371d6b91c203b1032b9600d69c568e537b7988a3073dfe4d4774f21"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, ] [package.dependencies] -numpy = ">=1.22.4,<1.29.0" +numpy = ">=1.22.4,<2.3" [package.extras] -dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] +test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "sentence-transformers" @@ -7482,61 +8355,21 @@ testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] -name = "shapely" -version = "2.0.3" -description = "Manipulation and analysis of geometric objects" +name = "simple-websocket" +version = "1.0.0" +description = "Simple WebSocket server and client for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = 
"shapely-2.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:af7e9abe180b189431b0f490638281b43b84a33a960620e6b2e8d3e3458b61a1"}, - {file = "shapely-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98040462b36ced9671e266b95c326b97f41290d9d17504a1ee4dc313a7667b9c"}, - {file = "shapely-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:71eb736ef2843f23473c6e37f6180f90f0a35d740ab284321548edf4e55d9a52"}, - {file = "shapely-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:881eb9dbbb4a6419667e91fcb20313bfc1e67f53dbb392c6840ff04793571ed1"}, - {file = "shapely-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10d2ccf0554fc0e39fad5886c839e47e207f99fdf09547bc687a2330efda35b"}, - {file = "shapely-2.0.3-cp310-cp310-win32.whl", hash = "sha256:6dfdc077a6fcaf74d3eab23a1ace5abc50c8bce56ac7747d25eab582c5a2990e"}, - {file = "shapely-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:64c5013dacd2d81b3bb12672098a0b2795c1bf8190cfc2980e380f5ef9d9e4d9"}, - {file = "shapely-2.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:56cee3e4e8159d6f2ce32e421445b8e23154fd02a0ac271d6a6c0b266a8e3cce"}, - {file = "shapely-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:619232c8276fded09527d2a9fd91a7885ff95c0ff9ecd5e3cb1e34fbb676e2ae"}, - {file = "shapely-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2a7d256db6f5b4b407dc0c98dd1b2fcf1c9c5814af9416e5498d0a2e4307a4b"}, - {file = "shapely-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45f0c8cd4583647db3216d965d49363e6548c300c23fd7e57ce17a03f824034"}, - {file = "shapely-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb37d3826972a82748a450328fe02a931dcaed10e69a4d83cc20ba021bc85f"}, - {file = "shapely-2.0.3-cp311-cp311-win32.whl", hash = "sha256:9302d7011e3e376d25acd30d2d9e70d315d93f03cc748784af19b00988fc30b1"}, - {file = "shapely-2.0.3-cp311-cp311-win_amd64.whl", 
hash = "sha256:6b464f2666b13902835f201f50e835f2f153f37741db88f68c7f3b932d3505fa"}, - {file = "shapely-2.0.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e86e7cb8e331a4850e0c2a8b2d66dc08d7a7b301b8d1d34a13060e3a5b4b3b55"}, - {file = "shapely-2.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c91981c99ade980fc49e41a544629751a0ccd769f39794ae913e53b07b2f78b9"}, - {file = "shapely-2.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd45d456983dc60a42c4db437496d3f08a4201fbf662b69779f535eb969660af"}, - {file = "shapely-2.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:882fb1ffc7577e88c1194f4f1757e277dc484ba096a3b94844319873d14b0f2d"}, - {file = "shapely-2.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9f2d93bff2ea52fa93245798cddb479766a18510ea9b93a4fb9755c79474889"}, - {file = "shapely-2.0.3-cp312-cp312-win32.whl", hash = "sha256:99abad1fd1303b35d991703432c9481e3242b7b3a393c186cfb02373bf604004"}, - {file = "shapely-2.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:6f555fe3304a1f40398977789bc4fe3c28a11173196df9ece1e15c5bc75a48db"}, - {file = "shapely-2.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983cc418c1fa160b7d797cfef0e0c9f8c6d5871e83eae2c5793fce6a837fad9"}, - {file = "shapely-2.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18bddb8c327f392189a8d5d6b9a858945722d0bb95ccbd6a077b8e8fc4c7890d"}, - {file = "shapely-2.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:442f4dcf1eb58c5a4e3428d88e988ae153f97ab69a9f24e07bf4af8038536325"}, - {file = "shapely-2.0.3-cp37-cp37m-win32.whl", hash = "sha256:31a40b6e3ab00a4fd3a1d44efb2482278642572b8e0451abdc8e0634b787173e"}, - {file = "shapely-2.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:59b16976c2473fec85ce65cc9239bef97d4205ab3acead4e6cdcc72aee535679"}, - {file = "shapely-2.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:705efbce1950a31a55b1daa9c6ae1c34f1296de71ca8427974ec2f27d57554e3"}, - {file = "shapely-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:601c5c0058a6192df704cb889439f64994708563f57f99574798721e9777a44b"}, - {file = "shapely-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f24ecbb90a45c962b3b60d8d9a387272ed50dc010bfe605f1d16dfc94772d8a1"}, - {file = "shapely-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8c2a2989222c6062f7a0656e16276c01bb308bc7e5d999e54bf4e294ce62e76"}, - {file = "shapely-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42bceb9bceb3710a774ce04908fda0f28b291323da2688f928b3f213373b5aee"}, - {file = "shapely-2.0.3-cp38-cp38-win32.whl", hash = "sha256:54d925c9a311e4d109ec25f6a54a8bd92cc03481a34ae1a6a92c1fe6729b7e01"}, - {file = "shapely-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:300d203b480a4589adefff4c4af0b13919cd6d760ba3cbb1e56275210f96f654"}, - {file = "shapely-2.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:083d026e97b6c1f4a9bd2a9171c7692461092ed5375218170d91705550eecfd5"}, - {file = "shapely-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:27b6e1910094d93e9627f2664121e0e35613262fc037051680a08270f6058daf"}, - {file = "shapely-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:71b2de56a9e8c0e5920ae5ddb23b923490557ac50cb0b7fa752761bf4851acde"}, - {file = "shapely-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d279e56bbb68d218d63f3efc80c819cedcceef0e64efbf058a1df89dc57201b"}, - {file = "shapely-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88566d01a30f0453f7d038db46bc83ce125e38e47c5f6bfd4c9c287010e9bf74"}, - {file = "shapely-2.0.3-cp39-cp39-win32.whl", hash = "sha256:58afbba12c42c6ed44c4270bc0e22f3dadff5656d711b0ad335c315e02d04707"}, - {file = "shapely-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5026b30433a70911979d390009261b8c4021ff87c7c3cbd825e62bb2ffa181bc"}, - 
{file = "shapely-2.0.3.tar.gz", hash = "sha256:4d65d0aa7910af71efa72fd6447e02a8e5dd44da81a983de9d736d6e6ccbe674"}, + {file = "simple-websocket-1.0.0.tar.gz", hash = "sha256:17d2c72f4a2bd85174a97e3e4c88b01c40c3f81b7b648b0cc3ce1305968928c8"}, + {file = "simple_websocket-1.0.0-py3-none-any.whl", hash = "sha256:1d5bf585e415eaa2083e2bcf02a3ecf91f9712e7b3e6b9fa0b461ad04e0837bc"}, ] [package.dependencies] -numpy = ">=1.14,<2" +wsproto = "*" [package.extras] -docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] -test = ["pytest", "pytest-cov"] +docs = ["sphinx"] [[package]] name = "six" @@ -7705,7 +8538,6 @@ files = [ [package.dependencies] anyio = ">=3.4.0,<5" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] @@ -7742,6 +8574,17 @@ docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"] release = ["twine"] test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] +[[package]] +name = "striprtf" +version = "0.0.26" +description = "A simple library to convert rtf to text" +optional = false +python-versions = "*" +files = [ + {file = "striprtf-0.0.26-py3-none-any.whl", hash = "sha256:8c8f9d32083cdc2e8bfb149455aa1cc5a4e0a035893bedc75db8b73becb3a1bb"}, + {file = "striprtf-0.0.26.tar.gz", hash = "sha256:fdb2bba7ac440072d1c41eab50d8d74ae88f60a8b6575c6e2c7805dc462093aa"}, +] + [[package]] name = "supabase" version = "2.4.1" @@ -7844,47 +8687,47 @@ files = [ [[package]] name = "tiktoken" -version = "0.6.0" +version = "0.5.2" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.8" files = [ - {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"}, - {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"}, - {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"}, - {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"}, - {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"}, - {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"}, - {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"}, - {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"}, - {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"}, - {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"}, - {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"}, - {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"}, - {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"}, - {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"}, - {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"}, - {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"}, - {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"}, - {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"}, - {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"}, - {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"}, - {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"}, - {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"}, - {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"}, - {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"}, - {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"}, - {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"}, - {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"}, - {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"}, - {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"}, - {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"}, - {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"}, - {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"}, - {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"}, - {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"}, - {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"}, - {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"}, + {file = "tiktoken-0.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c4e654282ef05ec1bd06ead22141a9a1687991cef2c6a81bdd1284301abc71d"}, + {file = "tiktoken-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b3134aa24319f42c27718c6967f3c1916a38a715a0fa73d33717ba121231307"}, + {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6092e6e77730929c8c6a51bb0d7cfdf1b72b63c4d033d6258d1f2ee81052e9e5"}, + {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ad8ae2a747622efae75837abba59be6c15a8f31b4ac3c6156bc56ec7a8e631"}, + {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:51cba7c8711afa0b885445f0637f0fcc366740798c40b981f08c5f984e02c9d1"}, + {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3d8c7d2c9313f8e92e987d585ee2ba0f7c40a0de84f4805b093b634f792124f5"}, + {file = "tiktoken-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:692eca18c5fd8d1e0dde767f895c17686faaa102f37640e884eecb6854e7cca7"}, + {file = "tiktoken-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:138d173abbf1ec75863ad68ca289d4da30caa3245f3c8d4bfb274c4d629a2f77"}, + {file = "tiktoken-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7388fdd684690973fdc450b47dfd24d7f0cbe658f58a576169baef5ae4658607"}, + {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a114391790113bcff670c70c24e166a841f7ea8f47ee2fe0e71e08b49d0bf2d4"}, + {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca96f001e69f6859dd52926d950cfcc610480e920e576183497ab954e645e6ac"}, + {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:15fed1dd88e30dfadcdd8e53a8927f04e1f6f81ad08a5ca824858a593ab476c7"}, + {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f8e692db5756f7ea8cb0cfca34638316dcf0841fb8469de8ed7f6a015ba0b0"}, + {file = "tiktoken-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:bcae1c4c92df2ffc4fe9f475bf8148dbb0ee2404743168bbeb9dcc4b79dc1fdd"}, + {file = "tiktoken-0.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b76a1e17d4eb4357d00f0622d9a48ffbb23401dcf36f9716d9bd9c8e79d421aa"}, + {file = "tiktoken-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01d8b171bb5df4035580bc26d4f5339a6fd58d06f069091899d4a798ea279d3e"}, + {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42adf7d4fb1ed8de6e0ff2e794a6a15005f056a0d83d22d1d6755a39bffd9e7f"}, + {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4c3f894dbe0adb44609f3d532b8ea10820d61fdcb288b325a458dfc60fefb7db"}, + {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58ccfddb4e62f0df974e8f7e34a667981d9bb553a811256e617731bf1d007d19"}, + {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58902a8bad2de4268c2a701f1c844d22bfa3cbcc485b10e8e3e28a050179330b"}, + {file = "tiktoken-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e39257826d0647fcac403d8fa0a474b30d02ec8ffc012cfaf13083e9b5e82c5"}, + {file = "tiktoken-0.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bde3b0fbf09a23072d39c1ede0e0821f759b4fa254a5f00078909158e90ae1f"}, + {file = "tiktoken-0.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddee082dcf1231ccf3a591d234935e6acf3e82ee28521fe99af9630bc8d2a60"}, + {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c057a6a4e777b5966a7540481a75a31429fc1cb4c9da87b71c8b75b5143037"}, + {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c4a049b87e28f1dc60509f8eb7790bc8d11f9a70d99b9dd18dfdd81a084ffe6"}, + {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5bf5ce759089f4f6521ea6ed89d8f988f7b396e9f4afb503b945f5c949c6bec2"}, + {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0c964f554af1a96884e01188f480dad3fc224c4bbcf7af75d4b74c4b74ae0125"}, + {file = "tiktoken-0.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:368dd5726d2e8788e47ea04f32e20f72a2012a8a67af5b0b003d1e059f1d30a3"}, + {file = "tiktoken-0.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2deef9115b8cd55536c0a02c0203512f8deb2447f41585e6d929a0b878a0dd2"}, + {file = "tiktoken-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ed7d380195affbf886e2f8b92b14edfe13f4768ff5fc8de315adba5b773815e"}, + {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c76fce01309c8140ffe15eb34ded2bb94789614b7d1d09e206838fc173776a18"}, + {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60a5654d6a2e2d152637dd9a880b4482267dfc8a86ccf3ab1cec31a8c76bfae8"}, + {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41d4d3228e051b779245a8ddd21d4336f8975563e92375662f42d05a19bdff41"}, + {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c1cdec2c92fcde8c17a50814b525ae6a88e8e5b02030dc120b76e11db93f13"}, + {file = "tiktoken-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:84ddb36faedb448a50b246e13d1b6ee3437f60b7169b723a4b2abad75e914f3e"}, + {file = "tiktoken-0.5.2.tar.gz", hash = "sha256:f54c581f134a8ea96ce2023ab221d4d4d81ab614efa0b2fbce926387deb56c80"}, ] [package.dependencies] @@ -8021,6 +8864,17 @@ dev = ["tokenizers[testing]"] docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -8147,13 +9001,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "transformers" -version = "4.39.2" +version = "4.39.3" description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" optional = true python-versions = ">=3.8.0" files = [ - {file = "transformers-4.39.2-py3-none-any.whl", hash = "sha256:8388a4ae1d91ade935f5c5b36dc47aa1a352b092c30595e3337b49a5f7e71b4e"}, - {file = "transformers-4.39.2.tar.gz", hash = 
"sha256:be0c7392cb92ab48efab2656f1cfd1cbda33b2b8a2917a18bd1196707dbebe14"}, + {file = "transformers-4.39.3-py3-none-any.whl", hash = "sha256:7838034a12cca3168247f9d2d1dba6724c9de3ae0f73a108258c6b8fc5912601"}, + {file = "transformers-4.39.3.tar.gz", hash = "sha256:2586e5ff4150f122716fc40f5530e92871befc051848fbe82600969c535b762d"}, ] [package.dependencies] @@ -8292,24 +9146,24 @@ files = [ [[package]] name = "types-pillow" -version = "10.2.0.20240324" +version = "10.2.0.20240331" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.2.0.20240324.tar.gz", hash = "sha256:e0108f0b30ea926a3a5d00f201cde627cde1574181b586eb36dd6be1e4ba09cf"}, - {file = "types_Pillow-10.2.0.20240324-py3-none-any.whl", hash = "sha256:e0ac3b50ade911b2a238b6633cc6be9559a6f8bc54141db6ad13b70989fd3d99"}, + {file = "types-Pillow-10.2.0.20240331.tar.gz", hash = "sha256:c604e39aeb70719e463d359157a2238bfc0e81bf9c658375703f7b98cfd607da"}, + {file = "types_Pillow-10.2.0.20240331-py3-none-any.whl", hash = "sha256:ceec177ab78d9d7c110b8efdb592298ae9ae3bb4da90f7de28b99818dfbe5179"}, ] [[package]] name = "types-pyasn1" -version = "0.5.0.20240301" +version = "0.6.0.20240402" description = "Typing stubs for pyasn1" optional = false python-versions = ">=3.8" files = [ - {file = "types-pyasn1-0.5.0.20240301.tar.gz", hash = "sha256:da328f5771d54a2016863270b281047f9cc38e39f65a297ba9f987d5de3403f1"}, - {file = "types_pyasn1-0.5.0.20240301-py3-none-any.whl", hash = "sha256:d9989899184bbd6e2adf6f812c8f49c48197fceea251a6fb13666dae3203f80d"}, + {file = "types-pyasn1-0.6.0.20240402.tar.gz", hash = "sha256:5d54dcb33f69dd269071ca098e923ac20c5f03c814631fa7f3ed9ee035a5da3a"}, + {file = "types_pyasn1-0.6.0.20240402-py3-none-any.whl", hash = "sha256:848d01e7313c200acc035a8b3d377fe7b2aecbe77f2be49eb160a7f82835aaaf"}, ] [[package]] @@ -8353,13 +9207,13 @@ files = [ [[package]] name = "types-pywin32" -version = "306.0.0.20240319" +version = "306.0.0.20240331" 
description = "Typing stubs for pywin32" optional = false python-versions = ">=3.8" files = [ - {file = "types-pywin32-306.0.0.20240319.tar.gz", hash = "sha256:a578ba15167a7a3a2bca0b307a401650d2f6b7bb58b2215d3ad9a6584b7baea5"}, - {file = "types_pywin32-306.0.0.20240319-py3-none-any.whl", hash = "sha256:acaa5da80f1b875115da8af2836c75c3cee4b45a2dd19e0fcac23dd011e2387b"}, + {file = "types-pywin32-306.0.0.20240331.tar.gz", hash = "sha256:3427e60ffbbc47b31e6bc416c3e4c37d579e6774e5152dab953589c9b64c4d58"}, + {file = "types_pywin32-306.0.0.20240331-py3-none-any.whl", hash = "sha256:844b90253f3da38e254c9ce199ba41f63da52c511003daedcda414e88f1cf319"}, ] [[package]] @@ -8390,28 +9244,17 @@ types-pyOpenSSL = "*" [[package]] name = "types-requests" -version = "2.31.0.6" +version = "2.31.0.20240403" description = "Typing stubs for requests" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, - {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, + {file = "types-requests-2.31.0.20240403.tar.gz", hash = "sha256:e1e0cd0b655334f39d9f872b68a1310f0e343647688bf2cee932ec4c2b04de59"}, + {file = "types_requests-2.31.0.20240403-py3-none-any.whl", hash = "sha256:06abf6a68f5c4f2a62f6bb006672dfb26ed50ccbfddb281e1ee6f09a65707d5d"}, ] [package.dependencies] -types-urllib3 = "*" - -[[package]] -name = "types-urllib3" -version = "1.26.25.14" -description = "Typing stubs for urllib3" -optional = false -python-versions = "*" -files = [ - {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, - {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, -] +urllib3 = ">=2" [[package]] name = "typing-extensions" @@ 
-8467,6 +9310,80 @@ tzdata = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] +[[package]] +name = "ujson" +version = "5.9.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab71bf27b002eaf7d047c54a68e60230fbd5cd9da60de7ca0aa87d0bccead8fa"}, + {file = "ujson-5.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a365eac66f5aa7a7fdf57e5066ada6226700884fc7dce2ba5483538bc16c8c5"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e015122b337858dba5a3dc3533af2a8fc0410ee9e2374092f6a5b88b182e9fcc"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:779a2a88c53039bebfbccca934430dabb5c62cc179e09a9c27a322023f363e0d"}, + {file = "ujson-5.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10ca3c41e80509fd9805f7c149068fa8dbee18872bbdc03d7cca928926a358d5"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a566e465cb2fcfdf040c2447b7dd9718799d0d90134b37a20dff1e27c0e9096"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f833c529e922577226a05bc25b6a8b3eb6c4fb155b72dd88d33de99d53113124"}, + {file = "ujson-5.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b68a0caab33f359b4cbbc10065c88e3758c9f73a11a65a91f024b2e7a1257106"}, + {file = "ujson-5.9.0-cp310-cp310-win32.whl", hash = "sha256:7cc7e605d2aa6ae6b7321c3ae250d2e050f06082e71ab1a4200b4ae64d25863c"}, + {file = "ujson-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:a6d3f10eb8ccba4316a6b5465b705ed70a06011c6f82418b59278fbc919bef6f"}, + {file = "ujson-5.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3b23bbb46334ce51ddb5dded60c662fbf7bb74a37b8f87221c5b0fec1ec6454b"}, + {file = "ujson-5.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6974b3a7c17bbf829e6c3bfdc5823c67922e44ff169851a755eab79a3dd31ec0"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5964ea916edfe24af1f4cc68488448fbb1ec27a3ddcddc2b236da575c12c8ae"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ba7cac47dd65ff88571eceeff48bf30ed5eb9c67b34b88cb22869b7aa19600d"}, + {file = "ujson-5.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bbd91a151a8f3358c29355a491e915eb203f607267a25e6ab10531b3b157c5e"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:829a69d451a49c0de14a9fecb2a2d544a9b2c884c2b542adb243b683a6f15908"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a807ae73c46ad5db161a7e883eec0fbe1bebc6a54890152ccc63072c4884823b"}, + {file = "ujson-5.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8fc2aa18b13d97b3c8ccecdf1a3c405f411a6e96adeee94233058c44ff92617d"}, + {file = "ujson-5.9.0-cp311-cp311-win32.whl", hash = "sha256:70e06849dfeb2548be48fdd3ceb53300640bc8100c379d6e19d78045e9c26120"}, + {file = "ujson-5.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7309d063cd392811acc49b5016728a5e1b46ab9907d321ebbe1c2156bc3c0b99"}, + {file = "ujson-5.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:20509a8c9f775b3a511e308bbe0b72897ba6b800767a7c90c5cca59d20d7c42c"}, + {file = "ujson-5.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b28407cfe315bd1b34f1ebe65d3bd735d6b36d409b334100be8cdffae2177b2f"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d302bd17989b6bd90d49bade66943c78f9e3670407dbc53ebcf61271cadc399"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9f21315f51e0db8ee245e33a649dd2d9dce0594522de6f278d62f15f998e050e"}, + {file = "ujson-5.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5635b78b636a54a86fdbf6f027e461aa6c6b948363bdf8d4fbb56a42b7388320"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:82b5a56609f1235d72835ee109163c7041b30920d70fe7dac9176c64df87c164"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ca35f484622fd208f55041b042d9d94f3b2c9c5add4e9af5ee9946d2d30db01"}, + {file = "ujson-5.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:829b824953ebad76d46e4ae709e940bb229e8999e40881338b3cc94c771b876c"}, + {file = "ujson-5.9.0-cp312-cp312-win32.whl", hash = "sha256:25fa46e4ff0a2deecbcf7100af3a5d70090b461906f2299506485ff31d9ec437"}, + {file = "ujson-5.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:60718f1720a61560618eff3b56fd517d107518d3c0160ca7a5a66ac949c6cf1c"}, + {file = "ujson-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d581db9db9e41d8ea0b2705c90518ba623cbdc74f8d644d7eb0d107be0d85d9c"}, + {file = "ujson-5.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ff741a5b4be2d08fceaab681c9d4bc89abf3c9db600ab435e20b9b6d4dfef12e"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdcb02cabcb1e44381221840a7af04433c1dc3297af76fde924a50c3054c708c"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e208d3bf02c6963e6ef7324dadf1d73239fb7008491fdf523208f60be6437402"}, + {file = "ujson-5.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4b3917296630a075e04d3d07601ce2a176479c23af838b6cf90a2d6b39b0d95"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0c4d6adb2c7bb9eb7c71ad6f6f612e13b264942e841f8cc3314a21a289a76c4e"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:0b159efece9ab5c01f70b9d10bbb77241ce111a45bc8d21a44c219a2aec8ddfd"}, + {file = "ujson-5.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0cb4a7814940ddd6619bdce6be637a4b37a8c4760de9373bac54bb7b229698b"}, + {file = "ujson-5.9.0-cp38-cp38-win32.whl", hash = "sha256:dc80f0f5abf33bd7099f7ac94ab1206730a3c0a2d17549911ed2cb6b7aa36d2d"}, + {file = "ujson-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:506a45e5fcbb2d46f1a51fead991c39529fc3737c0f5d47c9b4a1d762578fc30"}, + {file = "ujson-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0fd2eba664a22447102062814bd13e63c6130540222c0aa620701dd01f4be81"}, + {file = "ujson-5.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bdf7fc21a03bafe4ba208dafa84ae38e04e5d36c0e1c746726edf5392e9f9f36"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2f909bc08ce01f122fd9c24bc6f9876aa087188dfaf3c4116fe6e4daf7e194f"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4ea86c2afd41429751d22a3ccd03311c067bd6aeee2d054f83f97e41e11d8f"}, + {file = "ujson-5.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:63fb2e6599d96fdffdb553af0ed3f76b85fda63281063f1cb5b1141a6fcd0617"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32bba5870c8fa2a97f4a68f6401038d3f1922e66c34280d710af00b14a3ca562"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37ef92e42535a81bf72179d0e252c9af42a4ed966dc6be6967ebfb929a87bc60"}, + {file = "ujson-5.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f69f16b8f1c69da00e38dc5f2d08a86b0e781d0ad3e4cc6a13ea033a439c4844"}, + {file = "ujson-5.9.0-cp39-cp39-win32.whl", hash = "sha256:3382a3ce0ccc0558b1c1668950008cece9bf463ebb17463ebf6a8bfc060dae34"}, + {file = "ujson-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:6adef377ed583477cf005b58c3025051b5faa6b8cc25876e594afbb772578f21"}, + {file = 
"ujson-5.9.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ffdfebd819f492e48e4f31c97cb593b9c1a8251933d8f8972e81697f00326ff1"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eec2ddc046360d087cf35659c7ba0cbd101f32035e19047013162274e71fcf"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbb90aa5c23cb3d4b803c12aa220d26778c31b6e4b7a13a1f49971f6c7d088e"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0823cb70866f0d6a4ad48d998dd338dce7314598721bc1b7986d054d782dfd"}, + {file = "ujson-5.9.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4e35d7885ed612feb6b3dd1b7de28e89baaba4011ecdf995e88be9ac614765e9"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b048aa93eace8571eedbd67b3766623e7f0acbf08ee291bef7d8106210432427"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323279e68c195110ef85cbe5edce885219e3d4a48705448720ad925d88c9f851"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ac92d86ff34296f881e12aa955f7014d276895e0e4e868ba7fddebbde38e378"}, + {file = "ujson-5.9.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6eecbd09b316cea1fd929b1e25f70382917542ab11b692cb46ec9b0a26c7427f"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:473fb8dff1d58f49912323d7cb0859df5585cfc932e4b9c053bf8cf7f2d7c5c4"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f91719c6abafe429c1a144cfe27883eace9fb1c09a9c5ef1bcb3ae80a3076a4e"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1c0991c4fe256f5fdb19758f7eac7f47caac29a6c57d0de16a19048eb86bad"}, + {file 
= "ujson-5.9.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea0f55a1396708e564595aaa6696c0d8af532340f477162ff6927ecc46e21"}, + {file = "ujson-5.9.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:07e0cfdde5fd91f54cd2d7ffb3482c8ff1bf558abf32a8b953a5d169575ae1cd"}, + {file = "ujson-5.9.0.tar.gz", hash = "sha256:89cc92e73d5501b8a7f48575eeb14ad27156ad092c2e9fc7e3cf949f07e75532"}, +] + [[package]] name = "unstructured" version = "0.12.5" @@ -8487,6 +9404,7 @@ emoji = "*" filetype = "*" langdetect = "*" lxml = "*" +markdown = {version = "*", optional = true, markers = "extra == \"md\""} nltk = "*" numpy = "*" python-iso639 = "*" @@ -8603,19 +9521,20 @@ files = [ [[package]] name = "urllib3" -version = "1.26.18" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, - {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" @@ 
-8857,91 +9776,94 @@ test = ["websockets"] [[package]] name = "websockets" -version = "10.4" +version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websockets-10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d58804e996d7d2307173d56c297cf7bc132c52df27a3efaac5e8d43e36c21c48"}, - {file = "websockets-10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc0b82d728fe21a0d03e65f81980abbbcb13b5387f733a1a870672c5be26edab"}, - {file = "websockets-10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba089c499e1f4155d2a3c2a05d2878a3428cf321c848f2b5a45ce55f0d7d310c"}, - {file = "websockets-10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33d69ca7612f0ddff3316b0c7b33ca180d464ecac2d115805c044bf0a3b0d032"}, - {file = "websockets-10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62e627f6b6d4aed919a2052efc408da7a545c606268d5ab5bfab4432734b82b4"}, - {file = "websockets-10.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ea7b82bfcae927eeffc55d2ffa31665dc7fec7b8dc654506b8e5a518eb4d50"}, - {file = "websockets-10.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e0cb5cc6ece6ffa75baccfd5c02cffe776f3f5c8bf486811f9d3ea3453676ce8"}, - {file = "websockets-10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae5e95cfb53ab1da62185e23b3130e11d64431179debac6dc3c6acf08760e9b1"}, - {file = "websockets-10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7c584f366f46ba667cfa66020344886cf47088e79c9b9d39c84ce9ea98aaa331"}, - {file = "websockets-10.4-cp310-cp310-win32.whl", hash = "sha256:b029fb2032ae4724d8ae8d4f6b363f2cc39e4c7b12454df8df7f0f563ed3e61a"}, - {file = "websockets-10.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:8dc96f64ae43dde92530775e9cb169979f414dcf5cff670455d81a6823b42089"}, - {file = "websockets-10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47a2964021f2110116cc1125b3e6d87ab5ad16dea161949e7244ec583b905bb4"}, - {file = "websockets-10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e789376b52c295c4946403bd0efecf27ab98f05319df4583d3c48e43c7342c2f"}, - {file = "websockets-10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d3f0b61c45c3fa9a349cf484962c559a8a1d80dae6977276df8fd1fa5e3cb8c"}, - {file = "websockets-10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55b5905705725af31ccef50e55391621532cd64fbf0bc6f4bac935f0fccec46"}, - {file = "websockets-10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00c870522cdb69cd625b93f002961ffb0c095394f06ba8c48f17eef7c1541f96"}, - {file = "websockets-10.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f38706e0b15d3c20ef6259fd4bc1700cd133b06c3c1bb108ffe3f8947be15fa"}, - {file = "websockets-10.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f2c38d588887a609191d30e902df2a32711f708abfd85d318ca9b367258cfd0c"}, - {file = "websockets-10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe10ddc59b304cb19a1bdf5bd0a7719cbbc9fbdd57ac80ed436b709fcf889106"}, - {file = "websockets-10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90fcf8929836d4a0e964d799a58823547df5a5e9afa83081761630553be731f9"}, - {file = "websockets-10.4-cp311-cp311-win32.whl", hash = "sha256:b9968694c5f467bf67ef97ae7ad4d56d14be2751000c1207d31bf3bb8860bae8"}, - {file = "websockets-10.4-cp311-cp311-win_amd64.whl", hash = "sha256:a7a240d7a74bf8d5cb3bfe6be7f21697a28ec4b1a437607bae08ac7acf5b4882"}, - {file = "websockets-10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74de2b894b47f1d21cbd0b37a5e2b2392ad95d17ae983e64727e18eb281fe7cb"}, - {file = 
"websockets-10.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a686ecb4aa0d64ae60c9c9f1a7d5d46cab9bfb5d91a2d303d00e2cd4c4c5cc"}, - {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d15c968ea7a65211e084f523151dbf8ae44634de03c801b8bd070b74e85033"}, - {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00213676a2e46b6ebf6045bc11d0f529d9120baa6f58d122b4021ad92adabd41"}, - {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e23173580d740bf8822fd0379e4bf30aa1d5a92a4f252d34e893070c081050df"}, - {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:dd500e0a5e11969cdd3320935ca2ff1e936f2358f9c2e61f100a1660933320ea"}, - {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4239b6027e3d66a89446908ff3027d2737afc1a375f8fd3eea630a4842ec9a0c"}, - {file = "websockets-10.4-cp37-cp37m-win32.whl", hash = "sha256:8a5cc00546e0a701da4639aa0bbcb0ae2bb678c87f46da01ac2d789e1f2d2038"}, - {file = "websockets-10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a9f9a735deaf9a0cadc2d8c50d1a5bcdbae8b6e539c6e08237bc4082d7c13f28"}, - {file = "websockets-10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c1289596042fad2cdceb05e1ebf7aadf9995c928e0da2b7a4e99494953b1b94"}, - {file = "websockets-10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0cff816f51fb33c26d6e2b16b5c7d48eaa31dae5488ace6aae468b361f422b63"}, - {file = "websockets-10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dd9becd5fe29773d140d68d607d66a38f60e31b86df75332703757ee645b6faf"}, - {file = "websockets-10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45ec8e75b7dbc9539cbfafa570742fe4f676eb8b0d3694b67dabe2f2ceed8aa6"}, - {file = 
"websockets-10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f72e5cd0f18f262f5da20efa9e241699e0cf3a766317a17392550c9ad7b37d8"}, - {file = "websockets-10.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185929b4808b36a79c65b7865783b87b6841e852ef5407a2fb0c03381092fa3b"}, - {file = "websockets-10.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d27a7e34c313b3a7f91adcd05134315002aaf8540d7b4f90336beafaea6217c"}, - {file = "websockets-10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:884be66c76a444c59f801ac13f40c76f176f1bfa815ef5b8ed44321e74f1600b"}, - {file = "websockets-10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:931c039af54fc195fe6ad536fde4b0de04da9d5916e78e55405436348cfb0e56"}, - {file = "websockets-10.4-cp38-cp38-win32.whl", hash = "sha256:db3c336f9eda2532ec0fd8ea49fef7a8df8f6c804cdf4f39e5c5c0d4a4ad9a7a"}, - {file = "websockets-10.4-cp38-cp38-win_amd64.whl", hash = "sha256:48c08473563323f9c9debac781ecf66f94ad5a3680a38fe84dee5388cf5acaf6"}, - {file = "websockets-10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:40e826de3085721dabc7cf9bfd41682dadc02286d8cf149b3ad05bff89311e4f"}, - {file = "websockets-10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56029457f219ade1f2fc12a6504ea61e14ee227a815531f9738e41203a429112"}, - {file = "websockets-10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fc088b7a32f244c519a048c170f14cf2251b849ef0e20cbbb0fdf0fdaf556f"}, - {file = "websockets-10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc8709c00704194213d45e455adc106ff9e87658297f72d544220e32029cd3d"}, - {file = "websockets-10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0154f7691e4fe6c2b2bc275b5701e8b158dae92a1ab229e2b940efe11905dff4"}, - {file = 
"websockets-10.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c6d2264f485f0b53adf22697ac11e261ce84805c232ed5dbe6b1bcb84b00ff0"}, - {file = "websockets-10.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9bc42e8402dc5e9905fb8b9649f57efcb2056693b7e88faa8fb029256ba9c68c"}, - {file = "websockets-10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:edc344de4dac1d89300a053ac973299e82d3db56330f3494905643bb68801269"}, - {file = "websockets-10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:84bc2a7d075f32f6ed98652db3a680a17a4edb21ca7f80fe42e38753a58ee02b"}, - {file = "websockets-10.4-cp39-cp39-win32.whl", hash = "sha256:c94ae4faf2d09f7c81847c63843f84fe47bf6253c9d60b20f25edfd30fb12588"}, - {file = "websockets-10.4-cp39-cp39-win_amd64.whl", hash = "sha256:bbccd847aa0c3a69b5f691a84d2341a4f8a629c6922558f2a70611305f902d74"}, - {file = "websockets-10.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:82ff5e1cae4e855147fd57a2863376ed7454134c2bf49ec604dfe71e446e2193"}, - {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d210abe51b5da0ffdbf7b43eed0cfdff8a55a1ab17abbec4301c9ff077dd0342"}, - {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:942de28af58f352a6f588bc72490ae0f4ccd6dfc2bd3de5945b882a078e4e179"}, - {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b27d6c1c6cd53dc93614967e9ce00ae7f864a2d9f99fe5ed86706e1ecbf485"}, - {file = "websockets-10.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3d3cac3e32b2c8414f4f87c1b2ab686fa6284a980ba283617404377cd448f631"}, - {file = "websockets-10.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:da39dd03d130162deb63da51f6e66ed73032ae62e74aaccc4236e30edccddbb0"}, - {file = 
"websockets-10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389f8dbb5c489e305fb113ca1b6bdcdaa130923f77485db5b189de343a179393"}, - {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09a1814bb15eff7069e51fed0826df0bc0702652b5cb8f87697d469d79c23576"}, - {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff64a1d38d156d429404aaa84b27305e957fd10c30e5880d1765c9480bea490f"}, - {file = "websockets-10.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b343f521b047493dc4022dd338fc6db9d9282658862756b4f6fd0e996c1380e1"}, - {file = "websockets-10.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:932af322458da7e4e35df32f050389e13d3d96b09d274b22a7aa1808f292fee4"}, - {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a4162139374a49eb18ef5b2f4da1dd95c994588f5033d64e0bbfda4b6b6fcf"}, - {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c57e4c1349fbe0e446c9fa7b19ed2f8a4417233b6984277cce392819123142d3"}, - {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b627c266f295de9dea86bd1112ed3d5fafb69a348af30a2422e16590a8ecba13"}, - {file = "websockets-10.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:05a7233089f8bd355e8cbe127c2e8ca0b4ea55467861906b80d2ebc7db4d6b72"}, - {file = "websockets-10.4.tar.gz", hash = "sha256:eef610b23933c54d5d921c92578ae5f89813438fded840c2e9809d378dc765d3"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = 
"websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = 
"websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = 
"websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, ] [[package]] name = "werkzeug" -version = "3.0.1" +version = "3.0.2" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, - {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, + {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, + {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, ] [package.dependencies] @@ -9067,6 +9989,137 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = 
"xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = 
"xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + 
{file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = 
"sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", 
hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file = 
"xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + [[package]] name = "yarl" version = "1.9.4" @@ -9170,21 +10223,6 @@ files = [ idna = ">=2.0" multidict = ">=4.0" -[[package]] -name = "zep-python" -version = "2.0.0rc5" -description = "Long-Term Memory for AI Assistants. This is the Python client for the Zep service." -optional = false -python-versions = "<4,>=3.9.0" -files = [ - {file = "zep_python-2.0.0rc5-py3-none-any.whl", hash = "sha256:8b1b5c22c9e1ef439c9ef3d785347abf89b1243c7149e32025dd065cc022af40"}, - {file = "zep_python-2.0.0rc5.tar.gz", hash = "sha256:e6ced8089760374dead948d6b4b88fceb09a356bf9a7fe182b4ceb6e828f0bb1"}, -] - -[package.dependencies] -httpx = ">=0.24.0,<0.29.0" -pydantic = ">=2.0.0" - [[package]] name = "zipp" version = "3.18.1" @@ -9278,5 +10316,5 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" -python-versions = ">=3.9,<3.12" -content-hash = "1c79f55577a8fa603c09b2b7c02a01a7a397960c6c9c134dcee7e2f6eeea3d9e" +python-versions = ">=3.10,<3.12" +content-hash = "ed8605b2934fceb591d03d5be7461ed05a8f427512b693ce1baefeaa4fa21500" diff --git a/pyproject.toml b/pyproject.toml index 45661c562..5e34c2a0f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.6.12" +version = "1.0.0a4" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ @@ -23,91 +23,64 @@ documentation = "https://docs.langflow.org" [tool.poetry.scripts] langflow = "langflow.__main__:main" + [tool.poetry.dependencies] -python = ">=3.9,<3.12" -fastapi = "^0.109.0" -uvicorn = "^0.27.0" +python = ">=3.10,<3.12" +langflow-base = { path = 
"./src/backend/base", develop = true } beautifulsoup4 = "^4.12.2" google-search-results = "^2.4.1" google-api-python-client = "^2.118.0" -typer = "^0.9.0" -gunicorn = "^21.2.0" -langchain = "~0.1.0" -openai = "^1.12.0" -pandas = "2.2.0" -chromadb = "^0.4.0" huggingface-hub = { version = "^0.20.0", extras = ["inference"] } -rich = "^13.7.0" llama-cpp-python = { version = "~0.2.0", optional = true } networkx = "^3.1" -unstructured = "^0.12.0" -pypdf = "^4.0.0" -lxml = "^4.9.2" pysrt = "^1.1.2" fake-useragent = "^1.4.0" -docstring-parser = "^0.15" psycopg2-binary = "^2.9.6" pyarrow = "^14.0.0" -tiktoken = "~0.6.0" wikipedia = "^1.4.0" qdrant-client = "^1.7.0" -websockets = "^10.3" weaviate-client = "*" sentence-transformers = { version = "^2.3.1", optional = true } ctransformers = { version = "^0.2.10", optional = true } -cohere = "^4.47.0" -python-multipart = "^0.0.7" -sqlmodel = "^0.0.14" +cohere = "^5.1.7" faiss-cpu = "^1.7.4" -orjson = "3.9.15" -multiprocess = "^0.70.14" -cachetools = "^5.3.1" types-cachetools = "^5.3.0.5" -platformdirs = "^4.2.0" pinecone-client = "^3.0.3" pymongo = "^4.6.0" supabase = "^2.3.0" certifi = "^2023.11.17" -google-cloud-aiplatform = "^1.42.0" psycopg = "^3.1.9" psycopg-binary = "^3.1.9" fastavro = "^1.8.0" -langchain-experimental = "*" celery = { extras = ["redis"], version = "^5.3.6", optional = true } redis = { version = "^5.0.1", optional = true } flower = { version = "^2.0.0", optional = true } -alembic = "^1.13.0" -passlib = "^1.7.4" -bcrypt = "4.0.1" -python-jose = "^3.3.0" metaphor-python = "^0.1.11" -pydantic = "^2.6.0" -pydantic-settings = "^2.1.0" -zep-python = "*" pywin32 = { version = "^306", markers = "sys_platform == 'win32'" } -loguru = "^0.7.1" langfuse = "^2.9.0" -pillow = "^10.2.0" metal-sdk = "^2.5.0" markupsafe = "^2.1.3" extract-msg = "^0.47.0" # jq is not available for windows -jq = { version = "^1.6.0", markers = "sys_platform != 'win32'" } boto3 = "^1.34.0" numexpr = "^2.8.6" -qianfan = "0.3.0" +qianfan = 
"0.3.5" pgvector = "^0.2.3" pyautogen = "^0.2.0" -langchain-google-genai = "^0.0.6" +langchain-google-genai = "^1.0.1" +langchain-cohere = "^0.1.0rc1" elasticsearch = "^8.12.0" pytube = "^15.0.0" -llama-index = "0.9.48" -langchain-openai = "^0.0.6" -urllib3 = "<2" -langchain-anthropic = "^0.1.4" +llama-index = "^0.10.13" +langchain-openai = "^0.0.5" +unstructured = { extras = ["md"], version = "^0.12.4" } +dspy-ai = "^2.4.0" +crewai = "^0.22.5" +html2text = "^2024.2.26" +assemblyai = "^0.23.1" +litellm = "^1.34.22" [tool.poetry.group.dev.dependencies] -pytest-asyncio = "^0.23.1" types-redis = "^4.6.0.5" ipykernel = "^6.29.0" mypy = "^1.8.0" @@ -129,10 +102,12 @@ types-pywin32 = "^306.0.0.4" types-google-cloud-ndb = "^2.2.0.0" pytest-sugar = "^1.0.0" pytest-instafail = "^0.5.0" +pytest-asyncio = "^0.23.0" +respx = "^0.20.2" [tool.poetry.extras] -deploy = ["langchain-serve", "celery", "redis", "flower"] +deploy = ["celery", "redis", "flower"] local = ["llama-cpp-python", "sentence-transformers", "ctransformers"] all = ["deploy", "local"] diff --git a/scripts/setup/setup_env.sh b/scripts/setup/setup_env.sh new file mode 100644 index 000000000..70f580784 --- /dev/null +++ b/scripts/setup/setup_env.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# Create a .env if it doesn't exist, log all cases +if [ ! -f .env ]; then + echo "Creating .env file" + touch .env +else + echo ".env file already exists" +fi \ No newline at end of file diff --git a/scripts/setup/update_poetry.sh b/scripts/setup/update_poetry.sh new file mode 100644 index 000000000..94c51fd56 --- /dev/null +++ b/scripts/setup/update_poetry.sh @@ -0,0 +1,148 @@ +#!/bin/bash + +# Check if version argument is provided +if [ -z "$1" ] +then + echo "No argument supplied. Please provide the Poetry version to check." 
+ exit 1 +fi + +# Utility function to display an error message and exit +exit_with_message() { + echo "$1" >&2 + exit 1 +} + +# Check if version argument is provided +if [ -z "$1" ]; then + exit_with_message "No argument supplied. Please provide the Poetry version to check." +fi + +# Detect Operating System +OS="$(uname -s)" +case "$OS" in + Darwin) + OS="macOS" + ;; + Linux) + OS="Linux" + ;; + *) + exit_with_message "Unsupported operating system. This script supports macOS and Linux." + ;; +esac + +echo "Detected Operating System: $OS" + +# Installation of pipx based on the detected OS +install_pipx() { + case $1 in + macOS) + # macOS installation using Homebrew + command -v brew >/dev/null 2>&1 || exit_with_message "Homebrew is not installed. Please install Homebrew first." + echo "Installing pipx using Homebrew..." + brew install pipx + pipx ensurepath + ;; + Linux) + # Linux installation. Further checks are needed to distinguish between distributions + if grep -qEi "(ubuntu|debian)" /etc/*release; then + echo "Installing pipx on Ubuntu/Debian..." + sudo apt update + sudo apt install pipx -y + elif grep -qEi "fedora" /etc/*release; then + echo "Installing pipx on Fedora..." + sudo dnf install pipx -y + else + echo "Installing pipx using pip (other Linux distributions)..." + python3 -m pip install --user pipx + fi + pipx ensurepath + ;; + *) + exit_with_message "Unsupported operating system for pipx installation." + ;; + esac +} + +# Function to fetch the latest version of pipx from GitHub and compare with the installed version +check_for_pipx_update() { + echo "Checking for updates to pipx..." + # Fetch the latest version of pipx, ensuring only to capture the numeric version without 'v' prefix. + local latest_version=$(curl -s https://api.github.com/repos/pypa/pipx/releases/latest | grep '"tag_name":' | sed -E 's/.*"tag_name": "v?([^"]+)".*/\1/') + # Extract the current installed version of pipx. 
+ local current_version=$(pipx --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+') + + if [[ "$latest_version" == "$current_version" ]]; then + echo "You have the latest version of pipx ($current_version)." + else + echo "A newer version of pipx ($latest_version) is available. You have $current_version. Do you want to update? (yes/no)" + read -r user_input + if [[ "$user_input" == "yes" ]]; then + echo "Updating pipx..." + case "$OS" in + macOS) + brew upgrade pipx + ;; + Linux) + if grep -qEi "(ubuntu|debian)" /etc/*release; then + sudo apt update + sudo apt install --only-upgrade pipx -y + elif grep -qEi "fedora" /etc/*release; then + sudo dnf upgrade pipx -y + else + python3 -m pip install --user --upgrade pipx + fi + ;; + *) + exit_with_message "Unsupported operating system for pipx update." + ;; + esac + pipx ensurepath + echo "pipx updated to version $latest_version" + else + echo "Not updating pipx at this time." + fi + fi +} + +# Now, modify the existing check to call check_for_pipx_update even if pipx is installed +if ! command -v pipx &> /dev/null; then + echo "Pipx is not installed. Installing..." + install_pipx "$OS" + echo "Pipx installed successfully." +else + echo "Pipx is already installed." + check_for_pipx_update +fi + + +echo "Checking Poetry installation..." + +# Check if Poetry is installed +if ! command -v poetry &> /dev/null +then + echo "Poetry is not installed. Installing..." + # Also install python 3.10 and use + pipx install poetry --python python3.10 --fetch-missing-python + echo "Poetry installed successfully." +else + echo "Poetry is already installed." +fi + +echo "Checking Poetry version..." + +# Check Poetry version +poetry_version=$(poetry --version | awk '{print $3}' | tr -d '()') +echo "Current Poetry version: $poetry_version" + +# Compare version +if [[ "$(printf '%s\n' "$1" "$poetry_version" | sort -V | head -n1)" != "$1" ]]; then + echo "Poetry version is lower than $1. Updating..." 
+ # Update Poetry + poetry self update + echo "Poetry updated successfully." +else + echo "Poetry version is $1 or higher. No need to update." +fi + diff --git a/scripts/update_dependencies.py b/scripts/update_dependencies.py new file mode 100644 index 000000000..c70cf8f8f --- /dev/null +++ b/scripts/update_dependencies.py @@ -0,0 +1,51 @@ +import re +from pathlib import Path + + +def read_version_from_pyproject(file_path): + with open(file_path, "r") as file: + for line in file: + match = re.search(r'version = "(.*)"', line) + if match: + return match.group(1) + return None + + +def get_version_from_pypi(package_name): + import requests + + response = requests.get(f"https://pypi.org/pypi/{package_name}/json") + if response.ok: + return response.json()["info"]["version"] + return None + + +def update_pyproject_dependency(pyproject_path, version): + pattern = re.compile(r'langflow-base = \{ path = "\./src/backend/base", develop = true \}') + replacement = f'langflow-base = "^{version}"' + with open(pyproject_path, "r") as file: + content = file.read() + content = pattern.sub(replacement, content) + with open(pyproject_path, "w") as file: + file.write(content) + + +if __name__ == "__main__": + # Backing up files + pyproject_path = Path(__file__).resolve().parent / "../pyproject.toml" + pyproject_path = pyproject_path.resolve() + with open(pyproject_path, "r") as original, open(pyproject_path.with_name("pyproject.toml.bak"), "w") as backup: + backup.write(original.read()) + # Now backup poetry.lock + with open(pyproject_path.with_name("poetry.lock"), "r") as original, open( + pyproject_path.with_name("poetry.lock.bak"), "w" + ) as backup: + backup.write(original.read()) + + # Reading version and updating pyproject.toml + langflow_base_path = Path(__file__).resolve().parent / "../src/backend/base/pyproject.toml" + version = get_version_from_pypi("langflow-base") + if version: + update_pyproject_dependency(pyproject_path, version) + else: + print("Error: Version not 
found.") diff --git a/src/backend/langflow/interface/document_loaders/__init__.py b/src/backend/base/README.md similarity index 100% rename from src/backend/langflow/interface/document_loaders/__init__.py rename to src/backend/base/README.md diff --git a/src/backend/langflow/__main__.py b/src/backend/base/langflow/__main__.py similarity index 85% rename from src/backend/langflow/__main__.py rename to src/backend/base/langflow/__main__.py index bc430e941..55f792c3c 100644 --- a/src/backend/langflow/__main__.py +++ b/src/backend/base/langflow/__main__.py @@ -1,18 +1,20 @@ import platform import socket import sys +import time +import webbrowser from pathlib import Path from typing import Optional +import httpx import typer from dotenv import load_dotenv -from multiprocess import cpu_count # type: ignore +from multiprocess import Process, cpu_count # type: ignore from rich import box from rich import print as rprint from rich.console import Console from rich.panel import Panel from rich.table import Table -from sqlmodel import select from langflow.main import setup_app from langflow.services.database.utils import session_getter @@ -71,7 +73,7 @@ def update_settings( dev: bool = False, remove_api_keys: bool = False, components_path: Optional[Path] = None, - store: bool = False, + store: bool = True, ): """Update the settings from a config file.""" @@ -95,33 +97,6 @@ def update_settings( settings_service.settings.update_settings(STORE=False) -def version_callback(value: bool): - """ - Show the version and exit. - """ - from langflow import __version__ - - if value: - typer.echo(f"Langflow Version: {__version__}") - raise typer.Exit() - - -@app.callback() -def main_entry_point( - version: bool = typer.Option( - None, - "--version", - callback=version_callback, - is_eager=True, - help="Show the version and exit.", - ), -): - """ - Main entry point for the Langflow CLI. 
- """ - pass - - @app.command() def run( host: str = typer.Option("127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"), @@ -174,13 +149,13 @@ def run( Run the Langflow. """ + configure(log_level=log_level, log_file=log_file) set_var_for_macos_issue() # override env variables with .env file if env_file: load_dotenv(env_file, override=True) - configure(log_level=log_level, log_file=log_file) update_settings( config, dev=dev, @@ -213,12 +188,23 @@ def run( run_on_windows(host, port, log_level, options, app) else: # Run using gunicorn on Linux - run_on_mac_or_linux(host, port, log_level, options, app) + run_on_mac_or_linux(host, port, log_level, options, app, open_browser) -def run_on_mac_or_linux(host, port, log_level, options, app): +def run_on_mac_or_linux(host, port, log_level, options, app, open_browser=True): + webapp_process = Process(target=run_langflow, args=(host, port, log_level, options, app)) + webapp_process.start() + status_code = 0 + while status_code != 200: + try: + status_code = httpx.get(f"http://{host}:{port}/health").status_code + + except Exception: + time.sleep(1) + print_banner(host, port) - run_langflow(host, port, log_level, options, app) + if open_browser: + webbrowser.open(f"http://{host}:{port}") def run_on_windows(host, port, log_level, options, app): @@ -260,10 +246,18 @@ def get_free_port(port): def print_banner(host, port): - # console = Console() + try: + from langflow.version import __version__ # type: ignore - word = "Langflow" - colors = ["#3300cc"] + version = __version__ + word = "Langflow" + except ImportError: + from importlib import metadata + + version = metadata.version("langflow-base") + word = "Langflow Base" + + colors = ["#6e42f5"] styled_word = "" @@ -273,7 +267,7 @@ def print_banner(host, port): # Title with emojis and gradient text title = ( - f"[bold]Welcome to :chains: {styled_word} [/bold]\n\n" + f"[bold]Welcome to :chains: {styled_word} v{version}[/bold]\n" f"Access 
[link=http://{host}:{port}]http://{host}:{port}[/link]" ) info_text = ( @@ -293,26 +287,25 @@ def run_langflow(host, port, log_level, options, app): Run Langflow server on localhost """ try: - if platform.system() in ["Windows", "Darwin"]: + if platform.system() in ["Windows"]: # Run using uvicorn on MacOS and Windows # Windows doesn't support gunicorn # MacOS requires an env variable to be set to use gunicorn - import uvicorn uvicorn.run( app, host=host, port=port, - log_level=log_level, + log_level=log_level.lower(), + loop="asyncio", ) else: from langflow.server import LangflowApplication LangflowApplication(app, options).run() except KeyboardInterrupt: - logger.info("Shutting down server") - sys.exit(0) + pass except Exception as e: logger.exception(e) sys.exit(1) @@ -322,7 +315,7 @@ def run_langflow(host, port, log_level, options, app): def superuser( username: str = typer.Option(..., prompt=True, help="Username for the superuser."), password: str = typer.Option(..., prompt=True, hide_input=True, help="Password for the superuser."), - log_level: str = typer.Option("critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), + log_level: str = typer.Option("error", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"), ): """ Create a superuser. @@ -337,7 +330,7 @@ def superuser( # Verify that the superuser was created from langflow.services.database.models.user.model import User - user: User = session.exec(select(User).where(User.username == username)).first() + user: User = session.query(User).filter(User.username == username).first() if user is None or not user.is_superuser: typer.echo("Superuser creation failed.") return @@ -349,23 +342,11 @@ def superuser( @app.command() -def migration( - test: bool = typer.Option(True, help="Run migrations in test mode."), - fix: bool = typer.Option( - False, - help="Fix migrations. 
This is a destructive operation, and should only be used if you know what you are doing.", - ), -): +def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")): """ Run or test migrations. """ - if fix: - if not typer.confirm( - "This will delete all data necessary to fix migrations. Are you sure you want to continue?" - ): - raise typer.Abort() - - initialize_services(fix_migration=fix) + initialize_services() db_service = get_db_service() if not test: db_service.run_migrations() diff --git a/src/backend/langflow/alembic.ini b/src/backend/base/langflow/alembic.ini similarity index 100% rename from src/backend/langflow/alembic.ini rename to src/backend/base/langflow/alembic.ini diff --git a/src/backend/langflow/alembic/README b/src/backend/base/langflow/alembic/README similarity index 100% rename from src/backend/langflow/alembic/README rename to src/backend/base/langflow/alembic/README diff --git a/src/backend/langflow/alembic/env.py b/src/backend/base/langflow/alembic/env.py similarity index 72% rename from src/backend/langflow/alembic/env.py rename to src/backend/base/langflow/alembic/env.py index 479db05bb..7400906c8 100644 --- a/src/backend/langflow/alembic/env.py +++ b/src/backend/base/langflow/alembic/env.py @@ -1,4 +1,5 @@ import os +import warnings from logging.config import fileConfig from alembic import context @@ -62,24 +63,32 @@ def run_migrations_online() -> None: and associate a connection with the context. 
""" - from langflow.services.deps import get_db_service try: + from langflow.services.database.factory import DatabaseServiceFactory + from langflow.services.deps import get_db_service + from langflow.services.manager import initialize_settings_service, service_manager + + initialize_settings_service() + service_manager.register_factory(DatabaseServiceFactory()) connectable = get_db_service().engine except Exception as e: logger.error(f"Error getting database engine: {e}") + url = os.getenv("LANGFLOW_DATABASE_URL") + url = url or config.get_main_option("sqlalchemy.url") + if url: + config.set_main_option("sqlalchemy.url", url) connectable = engine_from_config( config.get_section(config.config_ini_section, {}), prefix="sqlalchemy.", poolclass=pool.NullPool, ) - with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata, render_as_batch=True - ) - - with context.begin_transaction(): - context.run_migrations() + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata, render_as_batch=True) + with context.begin_transaction(): + context.run_migrations() if context.is_offline_mode(): diff --git a/src/backend/langflow/alembic/script.py.mako b/src/backend/base/langflow/alembic/script.py.mako similarity index 90% rename from src/backend/langflow/alembic/script.py.mako rename to src/backend/base/langflow/alembic/script.py.mako index 2fbdc930d..bc9bca83a 100644 --- a/src/backend/langflow/alembic/script.py.mako +++ b/src/backend/base/langflow/alembic/script.py.mako @@ -23,10 +23,12 @@ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} def upgrade() -> None: conn = op.get_bind() inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() ${upgrades if upgrades else "pass"} def downgrade() -> None: conn = op.get_bind() inspector = 
Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() ${downgrades if downgrades else "pass"} diff --git a/src/backend/langflow/alembic/versions/006b3990db50_add_unique_constraints.py b/src/backend/base/langflow/alembic/versions/006b3990db50_add_unique_constraints.py similarity index 93% rename from src/backend/langflow/alembic/versions/006b3990db50_add_unique_constraints.py rename to src/backend/base/langflow/alembic/versions/006b3990db50_add_unique_constraints.py index 68b765160..df59c4e9f 100644 --- a/src/backend/langflow/alembic/versions/006b3990db50_add_unique_constraints.py +++ b/src/backend/base/langflow/alembic/versions/006b3990db50_add_unique_constraints.py @@ -26,9 +26,8 @@ def upgrade() -> None: flow_constraints = inspector.get_unique_constraints("flow") user_constraints = inspector.get_unique_constraints("user") try: - if not any(constraint['column_names'] == ['id'] for constraint in api_key_constraints): + if not any(constraint["column_names"] == ["id"] for constraint in api_key_constraints): with op.batch_alter_table("apikey", schema=None) as batch_op: - batch_op.create_unique_constraint("uq_apikey_id", ["id"]) if not any(constraint["column_names"] == ["id"] for constraint in flow_constraints): with op.batch_alter_table("flow", schema=None) as batch_op: @@ -51,16 +50,13 @@ def downgrade() -> None: flow_constraints = inspector.get_unique_constraints("flow") user_constraints = inspector.get_unique_constraints("user") try: - if any( - constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints - ): + if any(constraint["name"] == "uq_apikey_id" for constraint in api_key_constraints): with op.batch_alter_table("user", schema=None) as batch_op: batch_op.drop_constraint("uq_user_id", type_="unique") if any(constraint["name"] == "uq_flow_id" for constraint in flow_constraints): with op.batch_alter_table("flow", schema=None) as batch_op: batch_op.drop_constraint("uq_flow_id", type_="unique") if 
any(constraint["name"] == "uq_user_id" for constraint in user_constraints): - with op.batch_alter_table("apikey", schema=None) as batch_op: batch_op.drop_constraint("uq_apikey_id", type_="unique") except Exception as e: diff --git a/src/backend/langflow/alembic/versions/0b8757876a7c_.py b/src/backend/base/langflow/alembic/versions/0b8757876a7c_.py similarity index 100% rename from src/backend/langflow/alembic/versions/0b8757876a7c_.py rename to src/backend/base/langflow/alembic/versions/0b8757876a7c_.py diff --git a/src/backend/base/langflow/alembic/versions/1a110b568907_replace_credential_table_with_variable.py b/src/backend/base/langflow/alembic/versions/1a110b568907_replace_credential_table_with_variable.py new file mode 100644 index 000000000..e27d088d0 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/1a110b568907_replace_credential_table_with_variable.py @@ -0,0 +1,65 @@ +"""Replace Credential table with Variable + +Revision ID: 1a110b568907 +Revises: 63b9c451fd30 +Create Date: 2024-03-25 09:40:02.743453 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "1a110b568907" +down_revision: Union[str, None] = "63b9c451fd30" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() + # ### commands auto generated by Alembic - please adjust! 
### + if "variable" not in table_names: + op.create_table( + "variable", + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("value", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=True), + sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_variable_user_id"), + sa.PrimaryKeyConstraint("id"), + ) + if "credential" in table_names: + op.drop_table("credential") + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() + # ### commands auto generated by Alembic - please adjust! ### + if "credential" not in table_names: + op.create_table( + "credential", + sa.Column("name", sa.VARCHAR(), nullable=True), + sa.Column("value", sa.VARCHAR(), nullable=True), + sa.Column("provider", sa.VARCHAR(), nullable=True), + sa.Column("user_id", sa.CHAR(length=32), nullable=False), + sa.Column("id", sa.CHAR(length=32), nullable=False), + sa.Column("created_at", sa.DATETIME(), nullable=False), + sa.Column("updated_at", sa.DATETIME(), nullable=True), + sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_credential_user_id"), + sa.PrimaryKeyConstraint("id"), + ) + if "variable" in table_names: + op.drop_table("variable") + # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/1ef9c4f3765d_.py b/src/backend/base/langflow/alembic/versions/1ef9c4f3765d_.py similarity index 83% rename from src/backend/langflow/alembic/versions/1ef9c4f3765d_.py rename to src/backend/base/langflow/alembic/versions/1ef9c4f3765d_.py index df92f1f02..db78e33bd 100644 --- 
a/src/backend/langflow/alembic/versions/1ef9c4f3765d_.py +++ b/src/backend/base/langflow/alembic/versions/1ef9c4f3765d_.py @@ -24,10 +24,8 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### try: with op.batch_alter_table("apikey", schema=None) as batch_op: - batch_op.alter_column( - "name", existing_type=sqlmodel.sql.sqltypes.AutoString(), nullable=True - ) - except Exception as e: + batch_op.alter_column("name", existing_type=sqlmodel.sql.sqltypes.AutoString(), nullable=True) + except Exception: pass # ### end Alembic commands ### @@ -37,6 +35,6 @@ def downgrade() -> None: try: with op.batch_alter_table("apikey", schema=None) as batch_op: batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False) - except Exception as e: + except Exception: pass # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/260dbcc8b680_adds_tables.py b/src/backend/base/langflow/alembic/versions/260dbcc8b680_adds_tables.py similarity index 81% rename from src/backend/langflow/alembic/versions/260dbcc8b680_adds_tables.py rename to src/backend/base/langflow/alembic/versions/260dbcc8b680_adds_tables.py index 0d7eed582..01db957fe 100644 --- a/src/backend/langflow/alembic/versions/260dbcc8b680_adds_tables.py +++ b/src/backend/base/langflow/alembic/versions/260dbcc8b680_adds_tables.py @@ -31,23 +31,16 @@ def upgrade() -> None: # and other related indices if "flowstyle" in existing_tables: op.drop_table("flowstyle") - if "ix_flowstyle_flow_id" in [ - index["name"] for index in inspector.get_indexes("flowstyle") - ]: - op.drop_index( - "ix_flowstyle_flow_id", table_name="flowstyle", if_exists=True - ) + if "ix_flowstyle_flow_id" in [index["name"] for index in inspector.get_indexes("flowstyle")]: + op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle", if_exists=True) existing_indices_flow = [] existing_fks_flow = [] if "flow" in existing_tables: - existing_indices_flow = [ - index["name"] for index in 
inspector.get_indexes("flow") - ] + existing_indices_flow = [index["name"] for index in inspector.get_indexes("flow")] # Existing foreign keys for the 'flow' table, if it exists existing_fks_flow = [ - fk["referred_table"] + "." + fk["referred_columns"][0] - for fk in inspector.get_foreign_keys("flow") + fk["referred_table"] + "." + fk["referred_columns"][0] for fk in inspector.get_foreign_keys("flow") ] # Now check if the columns user_id exists in the 'flow' table # If it does not exist, we need to create the foreign key @@ -67,9 +60,7 @@ def upgrade() -> None: sa.UniqueConstraint("id", name="uq_user_id"), ) with op.batch_alter_table("user", schema=None) as batch_op: - batch_op.create_index( - batch_op.f("ix_user_username"), ["username"], unique=True - ) + batch_op.create_index(batch_op.f("ix_user_username"), ["username"], unique=True) if "apikey" not in existing_tables: op.create_table( @@ -82,20 +73,14 @@ def upgrade() -> None: sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False), sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False), - sa.ForeignKeyConstraint( - ["user_id"], ["user.id"], name="fk_apikey_user_id_user" - ), + sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_apikey_user_id_user"), sa.PrimaryKeyConstraint("id", name="pk_apikey"), sa.UniqueConstraint("id", name="uq_apikey_id"), ) with op.batch_alter_table("apikey", schema=None) as batch_op: - batch_op.create_index( - batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True - ) + batch_op.create_index(batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True) batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False) - batch_op.create_index( - batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False - ) + batch_op.create_index(batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False) if "flow" not in existing_tables: op.create_table( "flow", @@ -104,9 +89,7 @@ def upgrade() 
-> None: sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False), - sa.ForeignKeyConstraint( - ["user_id"], ["user.id"], name="fk_flow_user_id_user" - ), + sa.ForeignKeyConstraint(["user_id"], ["user.id"], name="fk_flow_user_id_user"), sa.PrimaryKeyConstraint("id", name="pk_flow"), sa.UniqueConstraint("id", name="uq_flow_id"), ) @@ -129,16 +112,12 @@ def upgrade() -> None: if "user.id" not in existing_fks_flow: batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"]) if "ix_flow_description" not in existing_indices_flow: - batch_op.create_index( - batch_op.f("ix_flow_description"), ["description"], unique=False - ) + batch_op.create_index(batch_op.f("ix_flow_description"), ["description"], unique=False) if "ix_flow_name" not in existing_indices_flow: batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False) with op.batch_alter_table("flow", schema=None) as batch_op: if "ix_flow_user_id" not in existing_indices_flow: - batch_op.create_index( - batch_op.f("ix_flow_user_id"), ["user_id"], unique=False - ) + batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False) # ### end Alembic commands ### @@ -169,10 +148,4 @@ def downgrade() -> None: batch_op.drop_index(batch_op.f("ix_user_username"), if_exists=True) op.drop_table("user") - - if "flowstyle" in existing_tables: - op.drop_table("flowstyle") - - if "component" in existing_tables: - op.drop_table("component") # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py b/src/backend/base/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py similarity index 92% rename from src/backend/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py rename to src/backend/base/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py index 
ce2d2cd76..2250a8b8c 100644 --- a/src/backend/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py +++ b/src/backend/base/langflow/alembic/versions/2ac71eb9c3ae_adds_credential_table.py @@ -31,9 +31,7 @@ def upgrade() -> None: "credential", sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column("value", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column( - "provider", sqlmodel.sql.sqltypes.AutoString(), nullable=True - ), + sa.Column("provider", sqlmodel.sql.sqltypes.AutoString(), nullable=True), sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False), sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), sa.Column("created_at", sa.DateTime(), nullable=False), diff --git a/src/backend/base/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py b/src/backend/base/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py new file mode 100644 index 000000000..c6d6893e3 --- /dev/null +++ b/src/backend/base/langflow/alembic/versions/63b9c451fd30_add_icon_and_icon_bg_color_to_flow.py @@ -0,0 +1,50 @@ +"""Add icon and icon_bg_color to Flow + +Revision ID: 63b9c451fd30 +Revises: bc2f01c40e4a +Create Date: 2024-03-06 10:53:47.148658 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +import sqlmodel +from alembic import op +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "63b9c451fd30" +down_revision: Union[str, None] = "bc2f01c40e4a" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("flow")] + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table("flow", schema=None) as batch_op: + if "icon" not in column_names: + batch_op.add_column(sa.Column("icon", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + if "icon_bg_color" not in column_names: + batch_op.add_column(sa.Column("icon_bg_color", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + + # ### end Alembic commands ### + + +def downgrade() -> None: + conn = op.get_bind() + inspector = Inspector.from_engine(conn) # type: ignore + table_names = inspector.get_table_names() # noqa + column_names = [column["name"] for column in inspector.get_columns("flow")] + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table("flow", schema=None) as batch_op: + if "icon" in column_names: + batch_op.drop_column("icon") + if "icon_bg_color" in column_names: + batch_op.drop_column("icon_bg_color") + + # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py b/src/backend/base/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py similarity index 100% rename from src/backend/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py rename to src/backend/base/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py diff --git a/src/backend/langflow/alembic/versions/7843803a87b5_store_updates.py b/src/backend/base/langflow/alembic/versions/7843803a87b5_store_updates.py similarity index 82% rename from src/backend/langflow/alembic/versions/7843803a87b5_store_updates.py rename to src/backend/base/langflow/alembic/versions/7843803a87b5_store_updates.py index b1565cd0f..d2a576410 100644 --- a/src/backend/langflow/alembic/versions/7843803a87b5_store_updates.py +++ b/src/backend/base/langflow/alembic/versions/7843803a87b5_store_updates.py @@ -29,18 +29,14 @@ def upgrade() -> None: try: if "is_component" not in flow_columns: with op.batch_alter_table("flow", schema=None) as batch_op: - batch_op.add_column( - 
sa.Column("is_component", sa.Boolean(), nullable=True) - ) - except Exception as e: + batch_op.add_column(sa.Column("is_component", sa.Boolean(), nullable=True)) + except Exception: pass try: if "store_api_key" not in user_columns: with op.batch_alter_table("user", schema=None) as batch_op: - batch_op.add_column( - sa.Column("store_api_key", sqlmodel.AutoString(), nullable=True) - ) - except Exception as e: + batch_op.add_column(sa.Column("store_api_key", sqlmodel.AutoString(), nullable=True)) + except Exception: pass # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py b/src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py similarity index 80% rename from src/backend/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py rename to src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py index 5ed929568..b46400899 100644 --- a/src/backend/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py +++ b/src/backend/base/langflow/alembic/versions/7d2162acc8b2_adds_updated_at_and_folder_cols.py @@ -30,9 +30,7 @@ def upgrade() -> None: try: if "name" in api_key_columns: with op.batch_alter_table("apikey", schema=None) as batch_op: - batch_op.alter_column( - "name", existing_type=sa.VARCHAR(), nullable=False - ) + batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=False) except Exception as e: print(e) @@ -40,15 +38,9 @@ def upgrade() -> None: try: with op.batch_alter_table("flow", schema=None) as batch_op: if "updated_at" not in flow_columns: - batch_op.add_column( - sa.Column("updated_at", sa.DateTime(), nullable=True) - ) + batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True)) if "folder" not in flow_columns: - batch_op.add_column( - sa.Column( - "folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True - ) - ) + 
batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) except Exception as e: print(e) @@ -68,7 +60,6 @@ def downgrade() -> None: pass try: - with op.batch_alter_table("apikey", schema=None) as batch_op: batch_op.alter_column("name", existing_type=sa.VARCHAR(), nullable=True) except Exception as e: diff --git a/src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py b/src/backend/base/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py similarity index 81% rename from src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py rename to src/backend/base/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py index bb3c0c7cd..ed5317bf5 100644 --- a/src/backend/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py +++ b/src/backend/base/langflow/alembic/versions/b2fa308044b5_add_unique_constraints.py @@ -32,33 +32,19 @@ def upgrade() -> None: with op.batch_alter_table("flow", schema=None) as batch_op: flow_columns = [column["name"] for column in inspector.get_columns("flow")] if "is_component" not in flow_columns: - batch_op.add_column( - sa.Column("is_component", sa.Boolean(), nullable=True) - ) + batch_op.add_column(sa.Column("is_component", sa.Boolean(), nullable=True)) if "updated_at" not in flow_columns: - batch_op.add_column( - sa.Column("updated_at", sa.DateTime(), nullable=True) - ) + batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True)) if "folder" not in flow_columns: - batch_op.add_column( - sa.Column( - "folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True - ) - ) + batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) if "user_id" not in flow_columns: - batch_op.add_column( - sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True) - ) + batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)) indices = inspector.get_indexes("flow") indices_names 
= [index["name"] for index in indices] if "ix_flow_user_id" not in indices_names: - batch_op.create_index( - batch_op.f("ix_flow_user_id"), ["user_id"], unique=False - ) + batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False) if "fk_flow_user_id_user" not in indices_names: - batch_op.create_foreign_key( - "fk_flow_user_id_user", "user", ["user_id"], ["id"] - ) + batch_op.create_foreign_key("fk_flow_user_id_user", "user", ["user_id"], ["id"]) except Exception: pass diff --git a/src/backend/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py b/src/backend/base/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py similarity index 82% rename from src/backend/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py rename to src/backend/base/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py index cfbf74f06..e0fa1add8 100644 --- a/src/backend/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py +++ b/src/backend/base/langflow/alembic/versions/bc2f01c40e4a_new_fixes.py @@ -33,21 +33,13 @@ def upgrade() -> None: if "updated_at" not in flow_columns: batch_op.add_column(sa.Column("updated_at", sa.DateTime(), nullable=True)) if "folder" not in flow_columns: - batch_op.add_column( - sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True) - ) + batch_op.add_column(sa.Column("folder", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) if "user_id" not in flow_columns: - batch_op.add_column( - sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True) - ) + batch_op.add_column(sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=True)) if "ix_flow_user_id" not in flow_indexes: - batch_op.create_index( - batch_op.f("ix_flow_user_id"), ["user_id"], unique=False - ) + batch_op.create_index(batch_op.f("ix_flow_user_id"), ["user_id"], unique=False) if "flow_user_id_fkey" not in flow_fks: - batch_op.create_foreign_key( - "flow_user_id_fkey", "user", ["user_id"], ["id"] - ) + batch_op.create_foreign_key("flow_user_id_fkey", "user", 
["user_id"], ["id"]) def downgrade() -> None: diff --git a/src/backend/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py b/src/backend/base/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py similarity index 94% rename from src/backend/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py rename to src/backend/base/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py index 4da04c325..acb09cd5f 100644 --- a/src/backend/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py +++ b/src/backend/base/langflow/alembic/versions/eb5866d51fd2_change_columns_to_be_nullable.py @@ -19,7 +19,7 @@ depends_on: Union[str, Sequence[str], None] = None def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### - connection = op.get_bind() + connection = op.get_bind() # noqa pass # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py b/src/backend/base/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py similarity index 79% rename from src/backend/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py rename to src/backend/base/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py index 494de22ac..842c55857 100644 --- a/src/backend/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py +++ b/src/backend/base/langflow/alembic/versions/f5ee9749d1a6_user_id_can_be_null_in_flow.py @@ -22,9 +22,7 @@ def upgrade() -> None: # ### commands auto generated by Alembic - please adjust! 
### try: with op.batch_alter_table("flow", schema=None) as batch_op: - batch_op.alter_column( - "user_id", existing_type=sa.CHAR(length=32), nullable=True - ) + batch_op.alter_column("user_id", existing_type=sa.CHAR(length=32), nullable=True) except Exception as e: print(e) pass @@ -36,9 +34,7 @@ def downgrade() -> None: # ### commands auto generated by Alembic - please adjust! ### try: with op.batch_alter_table("flow", schema=None) as batch_op: - batch_op.alter_column( - "user_id", existing_type=sa.CHAR(length=32), nullable=False - ) + batch_op.alter_column("user_id", existing_type=sa.CHAR(length=32), nullable=False) except Exception as e: print(e) pass diff --git a/src/backend/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py b/src/backend/base/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py similarity index 93% rename from src/backend/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py rename to src/backend/base/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py index 77e6acd75..de69b491d 100644 --- a/src/backend/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py +++ b/src/backend/base/langflow/alembic/versions/fd531f8868b1_fix_credential_table.py @@ -31,9 +31,7 @@ def upgrade() -> None: try: if "credential" in tables and "fk_credential_user_id" not in foreign_keys_names: with op.batch_alter_table("credential", schema=None) as batch_op: - batch_op.create_foreign_key( - "fk_credential_user_id", "user", ["user_id"], ["id"] - ) + batch_op.create_foreign_key("fk_credential_user_id", "user", ["user_id"], ["id"]) except Exception as e: print(e) pass diff --git a/src/backend/langflow/api/__init__.py b/src/backend/base/langflow/api/__init__.py similarity index 100% rename from src/backend/langflow/api/__init__.py rename to src/backend/base/langflow/api/__init__.py diff --git a/src/backend/langflow/api/router.py b/src/backend/base/langflow/api/router.py similarity index 76% rename from 
src/backend/langflow/api/router.py rename to src/backend/base/langflow/api/router.py index 24d64b401..569014acf 100644 --- a/src/backend/langflow/api/router.py +++ b/src/backend/base/langflow/api/router.py @@ -4,13 +4,15 @@ from fastapi import APIRouter from langflow.api.v1 import ( api_key_router, chat_router, - credentials_router, endpoints_router, + files_router, flows_router, login_router, + monitor_router, store_router, users_router, validate_router, + variables_router, ) router = APIRouter( @@ -24,4 +26,6 @@ router.include_router(flows_router) router.include_router(users_router) router.include_router(api_key_router) router.include_router(login_router) -router.include_router(credentials_router) +router.include_router(variables_router) +router.include_router(files_router) +router.include_router(monitor_router) diff --git a/src/backend/langflow/api/utils.py b/src/backend/base/langflow/api/utils.py similarity index 55% rename from src/backend/langflow/api/utils.py rename to src/backend/base/langflow/api/utils.py index 141c79229..0500917a3 100644 --- a/src/backend/langflow/api/utils.py +++ b/src/backend/base/langflow/api/utils.py @@ -1,14 +1,19 @@ import warnings from pathlib import Path -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING, Optional from fastapi import HTTPException from platformdirs import user_cache_dir +from sqlmodel import Session +from langflow.graph.graph.base import Graph +from langflow.services.chat.service import ChatService +from langflow.services.database.models.flow import Flow from langflow.services.store.schema import StoreComponentCreate from langflow.services.store.utils import get_lf_version_from_pypi if TYPE_CHECKING: + from langflow.graph.vertex.base import Vertex from langflow.services.database.models.flow.model import Flow @@ -120,6 +125,9 @@ def update_template_field(frontend_template, key, value_dict): template_field["value"] = "" template_field["file_path"] = file_path_value + if "load_from_db" in 
value_dict and value_dict["load_from_db"]: + template_field["load_from_db"] = value_dict["load_from_db"] + def get_file_path_value(file_path): """Get the file path value if the file exists, else return empty string.""" @@ -137,7 +145,7 @@ def get_file_path_value(file_path): return file_path -def validate_is_component(flows: List["Flow"]): +def validate_is_component(flows: list["Flow"]): for flow in flows: if not flow.data or flow.is_component is not None: continue @@ -156,7 +164,7 @@ def get_is_component_from_data(data: dict): async def check_langflow_version(component: StoreComponentCreate): - from langflow import __version__ as current_version + from langflow.version.version import __version__ as current_version # type: ignore if not component.last_tested_version: component.last_tested_version = current_version @@ -171,19 +179,132 @@ async def check_langflow_version(component: StoreComponentCreate): ) -def format_elapsed_time(elapsed_time) -> str: - # Format elapsed time to human readable format coming from - # perf_counter() - # If the elapsed time is less than 1 second, return ms - # If the elapsed time is less than 1 minute, return seconds rounded to 2 decimals - time_str = "" +def format_elapsed_time(elapsed_time: float) -> str: + """Format elapsed time to a human-readable format coming from perf_counter(). 
+ + - Less than 1 second: returns milliseconds + - Less than 1 minute: returns seconds rounded to 2 decimals + - 1 minute or more: returns minutes and seconds + """ if elapsed_time < 1: - elapsed_time = int(round(elapsed_time * 1000)) - time_str = f"{elapsed_time} ms" + milliseconds = int(round(elapsed_time * 1000)) + return f"{milliseconds} ms" elif elapsed_time < 60: - elapsed_time = round(elapsed_time, 2) - time_str = f"{elapsed_time} seconds" + seconds = round(elapsed_time, 2) + unit = "second" if seconds == 1 else "seconds" + return f"{seconds} {unit}" else: - elapsed_time = round(elapsed_time / 60, 2) - time_str = f"{elapsed_time} minutes" - return time_str + minutes = int(elapsed_time // 60) + seconds = round(elapsed_time % 60, 2) + minutes_unit = "minute" if minutes == 1 else "minutes" + seconds_unit = "second" if seconds == 1 else "seconds" + return f"{minutes} {minutes_unit}, {seconds} {seconds_unit}" + + +async def build_and_cache_graph( + flow_id: str, + session: Session, + chat_service: "ChatService", + graph: Optional[Graph] = None, +): + """Build and cache the graph.""" + flow: Optional[Flow] = session.get(Flow, flow_id) + if not flow or not flow.data: + raise ValueError("Invalid flow ID") + other_graph = Graph.from_payload(flow.data, flow_id) + if graph is None: + graph = other_graph + else: + graph = graph.update(other_graph) + await chat_service.set_cache(flow_id, graph) + return graph + + +def format_syntax_error_message(exc: SyntaxError) -> str: + """Format a SyntaxError message for returning to the frontend.""" + if exc.text is None: + return f"Syntax error in code. Error on line {exc.lineno}" + return f"Syntax error in code. 
Error on line {exc.lineno}: {exc.text.strip()}" + + +def get_causing_exception(exc: BaseException) -> BaseException: + """Get the causing exception from an exception.""" + if hasattr(exc, "__cause__") and exc.__cause__: + return get_causing_exception(exc.__cause__) + return exc + + +def format_exception_message(exc: Exception) -> str: + """Format an exception message for returning to the frontend.""" + # We need to check if the __cause__ is a SyntaxError + # If it is, we need to return the message of the SyntaxError + causing_exception = get_causing_exception(exc) + if isinstance(causing_exception, SyntaxError): + return format_syntax_error_message(causing_exception) + return str(exc) + + +async def get_next_runnable_vertices( + graph: Graph, + vertex: "Vertex", + vertex_id: str, + chat_service: ChatService, + flow_id: str, +): + """ + Retrieves the next runnable vertices in the graph for a given vertex. + + Args: + graph (Graph): The graph object representing the flow. + vertex (Vertex): The current vertex. + vertex_id (str): The ID of the current vertex. + chat_service (ChatService): The chat service object. + flow_id (str): The ID of the flow. + + Returns: + list: A list of IDs of the next runnable vertices. 
+ + """ + async with chat_service._cache_locks[flow_id] as lock: + graph.remove_from_predecessors(vertex_id) + direct_successors_ready = [v for v in vertex.successors_ids if graph.is_vertex_runnable(v)] + if not direct_successors_ready: + # No direct successors ready, look for runnable predecessors of successors + next_runnable_vertices = graph.find_runnable_predecessors_for_successors(vertex_id) + else: + next_runnable_vertices = direct_successors_ready + + for v_id in set(next_runnable_vertices): # Use set to avoid duplicates + graph.vertices_to_run.remove(v_id) + graph.remove_from_predecessors(v_id) + await chat_service.set_cache(flow_id=flow_id, data=graph, lock=lock) + return next_runnable_vertices + + +def get_top_level_vertices(graph, vertices_ids): + """ + Retrieves the top-level vertices from the given graph based on the provided vertex IDs. + + Args: + graph (Graph): The graph object containing the vertices. + vertices_ids (list): A list of vertex IDs. + + Returns: + list: A list of top-level vertex IDs. 
+ + """ + top_level_vertices = [] + for vertex_id in vertices_ids: + vertex = graph.get_vertex(vertex_id) + if vertex.parent_is_top_level: + top_level_vertices.append(vertex.parent_node_id) + else: + top_level_vertices.append(vertex_id) + return top_level_vertices + + +def parse_exception(exc): + """Parse the exception message.""" + if hasattr(exc, "body"): + return exc.body["message"] + return str(exc) diff --git a/src/backend/langflow/api/v1/__init__.py b/src/backend/base/langflow/api/v1/__init__.py similarity index 72% rename from src/backend/langflow/api/v1/__init__.py rename to src/backend/base/langflow/api/v1/__init__.py index 6368a0cc8..0342c76c8 100644 --- a/src/backend/langflow/api/v1/__init__.py +++ b/src/backend/base/langflow/api/v1/__init__.py @@ -1,12 +1,14 @@ from langflow.api.v1.api_key import router as api_key_router from langflow.api.v1.chat import router as chat_router -from langflow.api.v1.credential import router as credentials_router from langflow.api.v1.endpoints import router as endpoints_router +from langflow.api.v1.files import router as files_router from langflow.api.v1.flows import router as flows_router from langflow.api.v1.login import router as login_router +from langflow.api.v1.monitor import router as monitor_router from langflow.api.v1.store import router as store_router from langflow.api.v1.users import router as users_router from langflow.api.v1.validate import router as validate_router +from langflow.api.v1.variable import router as variables_router __all__ = [ "chat_router", @@ -17,5 +19,7 @@ __all__ = [ "users_router", "api_key_router", "login_router", - "credentials_router", + "variables_router", + "monitor_router", + "files_router", ] diff --git a/src/backend/langflow/api/v1/api_key.py b/src/backend/base/langflow/api/v1/api_key.py similarity index 89% rename from src/backend/langflow/api/v1/api_key.py rename to src/backend/base/langflow/api/v1/api_key.py index 48cf7d1ca..77466960d 100644 --- 
a/src/backend/langflow/api/v1/api_key.py +++ b/src/backend/base/langflow/api/v1/api_key.py @@ -8,20 +8,10 @@ from langflow.api.v1.schemas import ApiKeyCreateRequest, ApiKeysResponse from langflow.services.auth import utils as auth_utils # Assuming you have these methods in your service layer -from langflow.services.database.models.api_key.crud import ( - create_api_key, - delete_api_key, - get_api_keys, -) -from langflow.services.database.models.api_key.model import ( - ApiKeyCreate, - UnmaskedApiKeyRead, -) +from langflow.services.database.models.api_key.crud import create_api_key, delete_api_key, get_api_keys +from langflow.services.database.models.api_key.model import ApiKeyCreate, UnmaskedApiKeyRead from langflow.services.database.models.user.model import User -from langflow.services.deps import ( - get_session, - get_settings_service, -) +from langflow.services.deps import get_session, get_settings_service if TYPE_CHECKING: pass diff --git a/src/backend/base/langflow/api/v1/base.py b/src/backend/base/langflow/api/v1/base.py new file mode 100644 index 000000000..b040d51c4 --- /dev/null +++ b/src/backend/base/langflow/api/v1/base.py @@ -0,0 +1,165 @@ +from typing import Optional + +from pydantic import BaseModel, field_validator, model_serializer + +from langflow.template.frontend_node.base import FrontendNode + + +class CacheResponse(BaseModel): + data: dict + + +class Code(BaseModel): + code: str + + +class FrontendNodeRequest(FrontendNode): + template: dict # type: ignore + + @model_serializer(mode="wrap") + def serialize_model(self, handler): + # Override the default serialization method in FrontendNode + # because we don't need the name in the response (i.e. 
{name: {}}) + return handler(self) + + +class ValidatePromptRequest(BaseModel): + name: str + template: str + custom_fields: Optional[dict] = None + frontend_node: Optional[FrontendNodeRequest] = None + + +# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} +class CodeValidationResponse(BaseModel): + imports: dict + function: dict + + @field_validator("imports") + @classmethod + def validate_imports(cls, v): + return v or {"errors": []} + + @field_validator("function") + @classmethod + def validate_function(cls, v): + return v or {"errors": []} + + +class PromptValidationResponse(BaseModel): + input_variables: list + # object return for tweak call + frontend_node: Optional[FrontendNodeRequest] = None + + +INVALID_CHARACTERS = { + " ", + ",", + ".", + ":", + ";", + "!", + "?", + "/", + "\\", + "(", + ")", + "[", + "]", +} + +INVALID_NAMES = { + "input_variables", + "output_parser", + "partial_variables", + "template", + "template_format", + "validate_template", +} + + +def is_json_like(var): + if var.startswith("{{") and var.endswith("}}"): + # If it is a double brance variable + # we don't want to validate any of its content + return True + # the above doesn't work on all cases because the json string can be multiline + # or indented which can add \n or spaces at the start or end of the string + # test_case_3 new_var == '\n{{\n "test": "hello",\n "text": "world"\n}}\n' + # what we can do is to remove the \n and spaces from the start and end of the string + # and then check if the string starts with {{ and ends with }} + var = var.strip() + var = var.replace("\n", "") + var = var.replace(" ", "") + # Now it should be a valid json string + return var.startswith("{{") and var.endswith("}}") + + +def fix_variable(var, invalid_chars, wrong_variables): + if not var: + return var, invalid_chars, wrong_variables + new_var = var + + # Handle variables starting with a number + if var[0].isdigit(): + invalid_chars.append(var[0]) + 
new_var, invalid_chars, wrong_variables = fix_variable(var[1:], invalid_chars, wrong_variables) + + # Temporarily replace {{ and }} to avoid treating them as invalid + new_var = new_var.replace("{{", "ᴛᴇᴍᴘᴏᴘᴇɴ").replace("}}", "ᴛᴇᴍᴘᴄʟᴏsᴇ") + + # Remove invalid characters + for char in new_var: + if char in INVALID_CHARACTERS: + invalid_chars.append(char) + new_var = new_var.replace(char, "") + if var not in wrong_variables: # Avoid duplicating entries + wrong_variables.append(var) + + # Restore {{ and }} + new_var = new_var.replace("ᴛᴇᴍᴘᴏᴘᴇɴ", "{{").replace("ᴛᴇᴍᴘᴄʟᴏsᴇ", "}}") + + return new_var, invalid_chars, wrong_variables + + +def check_variable(var, invalid_chars, wrong_variables, empty_variables): + if any(char in invalid_chars for char in var): + wrong_variables.append(var) + elif var == "": + empty_variables.append(var) + return wrong_variables, empty_variables + + +def check_for_errors(input_variables, fixed_variables, wrong_variables, empty_variables): + if any(var for var in input_variables if var not in fixed_variables): + error_message = ( + f"Error: Input variables contain invalid characters or formats. \n" + f"Invalid variables: {', '.join(wrong_variables)}.\n" + f"Empty variables: {', '.join(empty_variables)}. \n" + f"Fixed variables: {', '.join(fixed_variables)}." 
+ ) + raise ValueError(error_message) + + +def check_input_variables(input_variables): + invalid_chars = [] + fixed_variables = [] + wrong_variables = [] + empty_variables = [] + variables_to_check = [] + + for var in input_variables: + # First, let's check if the variable is a JSON string + # because if it is, it won't be considered a variable + # and we don't need to validate it + if is_json_like(var): + continue + + new_var, wrong_variables, empty_variables = fix_variable(var, invalid_chars, wrong_variables) + wrong_variables, empty_variables = check_variable(var, INVALID_CHARACTERS, wrong_variables, empty_variables) + fixed_variables.append(new_var) + variables_to_check.append(var) + + check_for_errors(variables_to_check, fixed_variables, wrong_variables, empty_variables) + + return fixed_variables diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/base/langflow/api/v1/callback.py similarity index 69% rename from src/backend/langflow/api/v1/callback.py rename to src/backend/base/langflow/api/v1/callback.py index f9be5d56c..b326311ac 100644 --- a/src/backend/langflow/api/v1/callback.py +++ b/src/backend/base/langflow/api/v1/callback.py @@ -1,17 +1,20 @@ -import asyncio -from typing import Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Dict, List, Optional from uuid import UUID from langchain.schema import AgentAction, AgentFinish -from langchain_core.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler -from langflow.api.v1.schemas import ChatResponse, PromptResponse -from langflow.services.deps import get_chat_service -from langflow.utils.util import remove_ansi_escape_codes +from langchain_core.callbacks.base import AsyncCallbackHandler from loguru import logger +from langflow.api.v1.schemas import ChatResponse, PromptResponse +from langflow.services.deps import get_chat_service, get_socket_service +from langflow.utils.util import remove_ansi_escape_codes + +if TYPE_CHECKING: + from langflow.services.socket.service 
import SocketIOService + # https://github.com/hwchase17/chat-langchain/blob/master/callback.py -class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): +class AsyncStreamingLLMCallbackHandleSIO(AsyncCallbackHandler): """Callback handler for streaming LLM responses.""" @property @@ -19,14 +22,16 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): """Whether to ignore chain callbacks.""" return False - def __init__(self, client_id: str): + def __init__(self, session_id: str): self.chat_service = get_chat_service() - self.client_id = client_id - self.websocket = self.chat_service.active_connections[self.client_id] + self.client_id = session_id + self.socketio_service: "SocketIOService" = get_socket_service() + self.sid = session_id + # self.socketio_service = self.chat_service.active_connections[self.client_id] async def on_llm_new_token(self, token: str, **kwargs: Any) -> None: resp = ChatResponse(message=token, type="stream", intermediate_steps="") - await self.websocket.send_json(resp.model_dump()) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) async def on_tool_start(self, serialized: Dict[str, Any], input_str: str, **kwargs: Any) -> Any: """Run when tool starts running.""" @@ -35,7 +40,7 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): type="stream", intermediate_steps=f"Tool input: {input_str}", ) - await self.websocket.send_json(resp.model_dump()) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) async def on_tool_end(self, output: str, **kwargs: Any) -> Any: """Run when tool ends running.""" @@ -66,7 +71,7 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): try: # This is to emulate the stream of tokens for resp in resps: - await self.websocket.send_json(resp.model_dump()) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) except Exception as exc: logger.error(f"Error sending response: {exc}") @@ -92,8 +97,7 @@ class 
AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): resp = PromptResponse( prompt=text, ) - await self.websocket.send_json(resp.model_dump()) - self.chat_service.chat_history.add_message(self.client_id, resp) + await self.socketio_service.emit_message(to=self.sid, data=resp.model_dump()) async def on_agent_action(self, action: AgentAction, **kwargs: Any): log = f"Thought: {action.log}" @@ -103,10 +107,10 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): logs = log.split("\n") for log in logs: resp = ChatResponse(message="", type="stream", intermediate_steps=log) - await self.websocket.send_json(resp.model_dump()) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) else: resp = ChatResponse(message="", type="stream", intermediate_steps=log) - await self.websocket.send_json(resp.model_dump()) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) async def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> Any: """Run on agent end.""" @@ -115,20 +119,4 @@ class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler): type="stream", intermediate_steps=finish.log, ) - await self.websocket.send_json(resp.model_dump()) - - -class StreamingLLMCallbackHandler(BaseCallbackHandler): - """Callback handler for streaming LLM responses.""" - - def __init__(self, client_id: str): - self.chat_service = get_chat_service() - self.client_id = client_id - self.websocket = self.chat_service.active_connections[self.client_id] - - def on_llm_new_token(self, token: str, **kwargs: Any) -> None: - resp = ChatResponse(message=token, type="stream", intermediate_steps="") - - loop = asyncio.get_event_loop() - coroutine = self.websocket.send_json(resp.model_dump()) - asyncio.run_coroutine_threadsafe(coroutine, loop) + await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump()) diff --git a/src/backend/base/langflow/api/v1/chat.py b/src/backend/base/langflow/api/v1/chat.py new file mode 100644 index 
000000000..9ee4edb03 --- /dev/null +++ b/src/backend/base/langflow/api/v1/chat.py @@ -0,0 +1,329 @@ +import time +import uuid +from functools import partial +from typing import TYPE_CHECKING, Annotated, Optional + +from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException +from fastapi.responses import StreamingResponse +from loguru import logger + +from langflow.api.utils import ( + build_and_cache_graph, + format_elapsed_time, + format_exception_message, + get_top_level_vertices, + parse_exception, +) +from langflow.api.v1.schemas import ( + InputValueRequest, + ResultDataResponse, + StreamData, + VertexBuildResponse, + VerticesOrderResponse, +) +from langflow.services.auth.utils import get_current_active_user +from langflow.services.chat.service import ChatService +from langflow.services.deps import get_chat_service, get_session, get_session_service +from langflow.services.monitor.utils import log_vertex_build + +if TYPE_CHECKING: + from langflow.graph.vertex.types import ChatVertex + from langflow.services.session.service import SessionService + +router = APIRouter(tags=["Chat"]) + + +async def try_running_celery_task(vertex, user_id): + # Try running the task in celery + # and set the task_id to the local vertex + # if it fails, run the task locally + try: + from langflow.worker import build_vertex + + task = build_vertex.delay(vertex) + vertex.task_id = task.id + except Exception as exc: + logger.debug(f"Error running task in celery: {exc}") + vertex.task_id = None + await vertex.build(user_id=user_id) + return vertex + + +@router.get("/build/{flow_id}/vertices", response_model=VerticesOrderResponse) +async def get_vertices( + flow_id: str, + stop_component_id: Optional[str] = None, + start_component_id: Optional[str] = None, + chat_service: "ChatService" = Depends(get_chat_service), + session=Depends(get_session), +): + """ + Retrieve the vertices order for a given flow. + + Args: + flow_id (str): The ID of the flow. 
+ stop_component_id (str, optional): The ID of the stop component. Defaults to None. + start_component_id (str, optional): The ID of the start component. Defaults to None. + chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service). + session (Session, optional): The session dependency. Defaults to Depends(get_session). + + Returns: + VerticesOrderResponse: The response containing the ordered vertex IDs and the run ID. + + Raises: + HTTPException: If there is an error checking the build status. + """ + try: + # First, we need to check if the flow_id is in the cache + graph = None + if cache := await chat_service.get_cache(flow_id): + graph = cache.get("result") + graph = await build_and_cache_graph(flow_id, session, chat_service, graph) + if stop_component_id or start_component_id: + try: + first_layer = graph.sort_vertices(stop_component_id, start_component_id) + except Exception as exc: + logger.error(exc) + first_layer = graph.sort_vertices() + else: + first_layer = graph.sort_vertices() + # When we send vertices to the frontend + # we need to remove them from the predecessors + # so they are not considered for building again + # which duplicates the results + for vertex_id in first_layer: + graph.remove_from_predecessors(vertex_id) + + # Now vertices is a list of lists + # We need to get the id of each vertex + # and return the same structure but only with the ids + run_id = uuid.uuid4() + graph.set_run_id(run_id) + vertices_to_run = list(graph.vertices_to_run) + get_top_level_vertices(graph, graph.vertices_to_run) + return VerticesOrderResponse(ids=first_layer, run_id=run_id, vertices_to_run=vertices_to_run) + + except Exception as exc: + logger.error(f"Error checking build status: {exc}") + logger.exception(exc) + raise HTTPException(status_code=500, detail=str(exc)) from exc + + +@router.post("/build/{flow_id}/vertices/{vertex_id}") +async def build_vertex( + flow_id: str, + vertex_id: str, + background_tasks: 
BackgroundTasks, + inputs: Annotated[Optional[InputValueRequest], Body(embed=True)] = None, + chat_service: "ChatService" = Depends(get_chat_service), + current_user=Depends(get_current_active_user), +): + """Build a vertex instead of the entire graph. + + Args: + flow_id (str): The ID of the flow. + vertex_id (str): The ID of the vertex to build. + background_tasks (BackgroundTasks): The background tasks object for logging. + inputs (Optional[InputValueRequest], optional): The input values for the vertex. Defaults to None. + chat_service (ChatService, optional): The chat service dependency. Defaults to Depends(get_chat_service). + current_user (Any, optional): The current user dependency. Defaults to Depends(get_current_active_user). + + Returns: + VertexBuildResponse: The response containing the built vertex information. + + Raises: + HTTPException: If there is an error building the vertex. + + """ + + start_time = time.perf_counter() + next_runnable_vertices = [] + top_level_vertices = [] + try: + start_time = time.perf_counter() + cache = await chat_service.get_cache(flow_id) + if not cache: + # If there's no cache + logger.warning(f"No cache found for {flow_id}. 
Building graph starting at {vertex_id}") + graph = await build_and_cache_graph(flow_id=flow_id, session=next(get_session()), chat_service=chat_service) + else: + graph = cache.get("result") + result_data_response = ResultDataResponse(results={}) + duration = "" + vertex = graph.get_vertex(vertex_id) + try: + lock = chat_service._cache_locks[flow_id] + set_cache_coro = partial(chat_service.set_cache, flow_id=flow_id) + ( + next_runnable_vertices, + top_level_vertices, + result_dict, + params, + valid, + artifacts, + vertex, + ) = await graph.build_vertex( + lock=lock, + set_cache_coro=set_cache_coro, + vertex_id=vertex_id, + user_id=current_user.id, + inputs_dict=inputs.model_dump() if inputs else {}, + ) + result_data_response = ResultDataResponse(**result_dict.model_dump()) + + except Exception as exc: + logger.exception(f"Error building vertex: {exc}") + params = format_exception_message(exc) + valid = False + result_data_response = ResultDataResponse(results={}) + artifacts = {} + # If there's an error building the vertex + # we need to clear the cache + await chat_service.clear_cache(flow_id) + + # Log the vertex build + if not vertex.will_stream: + background_tasks.add_task( + log_vertex_build, + flow_id=flow_id, + vertex_id=vertex_id, + valid=valid, + params=params, + data=result_data_response, + artifacts=artifacts, + ) + + timedelta = time.perf_counter() - start_time + duration = format_elapsed_time(timedelta) + result_data_response.duration = duration + result_data_response.timedelta = timedelta + vertex.add_build_time(timedelta) + inactivated_vertices = None + inactivated_vertices = list(graph.inactivated_vertices) + graph.reset_inactivated_vertices() + graph.reset_activated_vertices() + await chat_service.set_cache(flow_id, graph) + + # graph.stop_vertex tells us if the user asked + # to stop the build of the graph at a certain vertex + # if it is in next_vertices_ids, we need to remove other + # vertices from next_vertices_ids + if graph.stop_vertex and 
graph.stop_vertex in next_runnable_vertices: + next_runnable_vertices = [graph.stop_vertex] + + build_response = VertexBuildResponse( + inactivated_vertices=inactivated_vertices, + next_vertices_ids=next_runnable_vertices, + top_level_vertices=top_level_vertices, + valid=valid, + params=params, + id=vertex.id, + data=result_data_response, + ) + return build_response + except Exception as exc: + logger.error(f"Error building vertex: {exc}") + logger.exception(exc) + message = parse_exception(exc) + raise HTTPException(status_code=500, detail=message) from exc + + +@router.get("/build/{flow_id}/{vertex_id}/stream", response_class=StreamingResponse) +async def build_vertex_stream( + flow_id: str, + vertex_id: str, + session_id: Optional[str] = None, + chat_service: "ChatService" = Depends(get_chat_service), + session_service: "SessionService" = Depends(get_session_service), +): + """Build a vertex instead of the entire graph. + + This function is responsible for building a single vertex instead of the entire graph. + It takes the `flow_id` and `vertex_id` as required parameters, and an optional `session_id`. + It also depends on the `ChatService` and `SessionService` services. + + If `session_id` is not provided, it retrieves the graph from the cache using the `chat_service`. + If `session_id` is provided, it loads the session data using the `session_service`. + + Once the graph is obtained, it retrieves the specified vertex using the `vertex_id`. + If the vertex does not support streaming, an error is raised. + If the vertex has a built result, it sends the result as a chunk. + If the vertex is not frozen or not built, it streams the vertex data. + If the vertex has a result, it sends the result as a chunk. + If none of the above conditions are met, an error is raised. + + If any exception occurs during the process, an error message is sent. + Finally, the stream is closed. 
+ + Returns: + A `StreamingResponse` object with the streamed vertex data in text/event-stream format. + + Raises: + HTTPException: If an error occurs while building the vertex. + """ + try: + + async def stream_vertex(): + try: + if not session_id: + cache = await chat_service.get_cache(flow_id) + if not cache: + # If there's no cache + raise ValueError(f"No cache found for {flow_id}.") + else: + graph = cache.get("result") + else: + session_data = await session_service.load_session(session_id, flow_id=flow_id) + graph, artifacts = session_data if session_data else (None, None) + if not graph: + raise ValueError(f"No graph found for {flow_id}.") + + vertex: "ChatVertex" = graph.get_vertex(vertex_id) + if not hasattr(vertex, "stream"): + raise ValueError(f"Vertex {vertex_id} does not support streaming") + if isinstance(vertex._built_result, str) and vertex._built_result: + stream_data = StreamData( + event="message", + data={"message": f"Streaming vertex {vertex_id}"}, + ) + yield str(stream_data) + stream_data = StreamData( + event="message", + data={"chunk": vertex._built_result}, + ) + yield str(stream_data) + + elif not vertex.frozen or not vertex._built: + logger.debug(f"Streaming vertex {vertex_id}") + stream_data = StreamData( + event="message", + data={"message": f"Streaming vertex {vertex_id}"}, + ) + yield str(stream_data) + async for chunk in vertex.stream(): + stream_data = StreamData( + event="message", + data={"chunk": chunk}, + ) + yield str(stream_data) + elif vertex.result is not None: + stream_data = StreamData( + event="message", + data={"chunk": vertex._built_result}, + ) + yield str(stream_data) + else: + raise ValueError(f"No result found for vertex {vertex_id}") + + except Exception as exc: + logger.exception(f"Error building vertex: {exc}") + exc_message = parse_exception(exc) + if exc_message == "The message must be an iterator or an async iterator.": + exc_message = "This stream has already been closed." 
+ yield str(StreamData(event="error", data={"error": exc_message})) + finally: + logger.debug("Closing stream") + yield str(StreamData(event="close", data={"message": "Stream closed"})) + + return StreamingResponse(stream_vertex(), media_type="text/event-stream") + except Exception as exc: + raise HTTPException(status_code=500, detail="Error building vertex") from exc diff --git a/src/backend/base/langflow/api/v1/endpoints.py b/src/backend/base/langflow/api/v1/endpoints.py new file mode 100644 index 000000000..4b032f6cb --- /dev/null +++ b/src/backend/base/langflow/api/v1/endpoints.py @@ -0,0 +1,455 @@ +from http import HTTPStatus +from typing import Annotated, List, Optional, Union + +import sqlalchemy as sa +from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status +from loguru import logger +from sqlmodel import Session, select + +from langflow.api.utils import update_frontend_node_with_template_values +from langflow.api.v1.schemas import ( + CustomComponentRequest, + InputValueRequest, + ProcessResponse, + RunResponse, + SimplifiedAPIRequest, + TaskStatusResponse, + Tweaks, + UpdateCustomComponentRequest, + UploadFileResponse, +) +from langflow.graph.graph.base import Graph +from langflow.graph.schema import RunOutputs +from langflow.interface.custom.custom_component import CustomComponent +from langflow.interface.custom.directory_reader import DirectoryReader +from langflow.interface.custom.utils import build_custom_component_template +from langflow.processing.process import process_tweaks, run_graph_internal +from langflow.services.auth.utils import api_key_security, get_current_active_user +from langflow.services.cache.utils import save_uploaded_file +from langflow.services.database.models.flow import Flow +from langflow.services.database.models.user.model import User +from langflow.services.deps import get_session, get_session_service, get_settings_service, get_task_service +from langflow.services.session.service import SessionService 
+from langflow.services.task.service import TaskService + +# build router +router = APIRouter(tags=["Base"]) + + +@router.get("/all", dependencies=[Depends(get_current_active_user)]) +def get_all( + settings_service=Depends(get_settings_service), +): + from langflow.interface.types import get_all_types_dict + + logger.debug("Building langchain types dict") + try: + all_types_dict = get_all_types_dict(settings_service.settings.COMPONENTS_PATH) + return all_types_dict + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=500, detail=str(exc)) from exc + + +@router.post("/run/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) +async def simplified_run_flow( + db: Annotated[Session, Depends(get_session)], + flow_id: str, + input_request: SimplifiedAPIRequest = SimplifiedAPIRequest(), + stream: bool = False, + api_key_user: User = Depends(api_key_security), + session_service: SessionService = Depends(get_session_service), +): + """ + Executes a specified flow by ID with input customization, performance enhancements through caching, and optional data streaming. + + ### Parameters: + - `db` (Session): Database session for executing queries. + - `flow_id` (str): Unique identifier of the flow to be executed. + - `input_request` (SimplifiedAPIRequest): Request object containing input values, types, output selection, tweaks, and session ID. + - `api_key_user` (User): User object derived from the provided API key, used for authentication. + - `session_service` (SessionService): Service for managing flow sessions, essential for session reuse and caching. + + ### SimplifiedAPIRequest: + - `input_value` (Optional[str], default=""): Input value to pass to the flow. + - `input_type` (Optional[Literal["chat", "text", "any"]], default="chat"): Type of the input value, determining how the input is interpreted. 
+ - `output_type` (Optional[Literal["chat", "text", "any", "debug"]], default="chat"): Desired type of output, affecting which components' outputs are included in the response. If set to "debug", all outputs are returned. + - `output_component` (Optional[str], default=None): Specific component output to retrieve. If provided, only the output of the specified component is returned. This overrides the `output_type` parameter. + - `tweaks` (Optional[Tweaks], default=None): Adjustments to the flow's behavior, allowing for custom execution parameters. + - `session_id` (Optional[str], default=None): An identifier for reusing session data, aiding in performance for subsequent requests. + + + ### Tweaks + A dictionary of tweaks to customize the flow execution. The tweaks can be used to modify the flow's parameters and components. Tweaks can be overridden by the input values. + You can use Component's `id` or Display Name as key to tweak a specific component (e.g., `{"Component Name": {"parameter_name": "value"}}`). + You can also use the parameter name as key to tweak all components with that parameter (e.g., `{"parameter_name": "value"}`). + + ### Returns: + - A `RunResponse` object containing the execution results, including selected (or all, based on `output_type`) outputs of the flow and the session ID, facilitating result retrieval and further interactions in a session context. 
+ + ### Raises: + - HTTPException: 404 if the specified flow ID is not found. + + ### Example: + ```bash + curl -X 'POST' \ + 'http://<your-langflow-host>/run/{flow_id}' \ + -H 'accept: application/json' \ + -H 'Content-Type: application/json' \ + -H 'x-api-key: YOUR_API_KEY' \ + -d '{ + "input_value": "Sample input", + "input_type": "chat", + "output_type": "chat", + "tweaks": {} + }' + ``` + + + This endpoint provides a powerful interface for executing flows with enhanced flexibility and efficiency, supporting a wide range of applications by allowing for dynamic input and output configuration along with performance optimizations through session management and caching. + """ + session_id = input_request.session_id + try: + task_result: List[RunOutputs] = [] + artifacts = {} + if input_request.session_id: + session_data = await session_service.load_session(input_request.session_id, flow_id=flow_id) + graph, artifacts = session_data if session_data else (None, None) + if graph is None: + raise ValueError(f"Session {input_request.session_id} not found") + else: + # Get the flow that matches the flow_id and belongs to the user + # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() + flow = db.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() + if flow is None: + raise ValueError(f"Flow {flow_id} not found") + + if flow.data is None: + raise ValueError(f"Flow {flow_id} has no data") + graph_data = flow.data + graph_data = process_tweaks(graph_data, input_request.tweaks or {}) + graph = Graph.from_payload(graph_data, flow_id=flow_id) + inputs = [ + InputValueRequest(components=[], input_value=input_request.input_value, type=input_request.input_type) + ] + # outputs is a list of all components that should return output + # we need to get them by checking their type + # if the output type is debug, we return all outputs + # if the output type is any, we return all outputs that are either chat
or text + # if the output type is chat or text, we return only the outputs that match the type + if input_request.output_component: + outputs = [input_request.output_component] + else: + outputs = [ + vertex.id + for vertex in graph.vertices + if input_request.output_type == "debug" + or ( + vertex.is_output + and (input_request.output_type == "any" or input_request.output_type in vertex.id.lower()) + ) + ] + task_result, session_id = await run_graph_internal( + graph=graph, + flow_id=flow_id, + session_id=input_request.session_id, + inputs=inputs, + outputs=outputs, + artifacts=artifacts, + session_service=session_service, + stream=stream, + ) + + return RunResponse(outputs=task_result, session_id=session_id) + except sa.exc.StatementError as exc: + # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') + if "badly formed hexadecimal UUID string" in str(exc): + logger.error(f"Flow ID {flow_id} is not a valid UUID") + # This means the Flow ID is not a valid UUID which means it can't find the flow + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + except ValueError as exc: + if f"Flow {flow_id} not found" in str(exc): + logger.error(f"Flow {flow_id} not found") + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + elif f"Session {session_id} not found" in str(exc): + logger.error(f"Session {session_id} not found") + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + else: + logger.exception(exc) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + + +@router.post("/run/advanced/{flow_id}", response_model=RunResponse, response_model_exclude_none=True) +async def experimental_run_flow( + session: Annotated[Session, Depends(get_session)], + 
flow_id: str, + inputs: Optional[List[InputValueRequest]] = [InputValueRequest(components=[], input_value="")], + outputs: Optional[List[str]] = [], + tweaks: Annotated[Optional[Tweaks], Body(embed=True)] = None, # noqa: F821 + stream: Annotated[bool, Body(embed=True)] = False, # noqa: F821 + session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 + api_key_user: User = Depends(api_key_security), + session_service: SessionService = Depends(get_session_service), +): + """ + Executes a specified flow by ID with optional input values, output selection, tweaks, and streaming capability. + This endpoint supports running flows with caching to enhance performance and efficiency. + + ### Parameters: + - `flow_id` (str): The unique identifier of the flow to be executed. + - `inputs` (List[InputValueRequest], optional): A list of inputs specifying the input values and components for the flow. Each input can target specific components and provide custom values. + - `outputs` (List[str], optional): A list of output names to retrieve from the executed flow. If not provided, all outputs are returned. + - `tweaks` (Optional[Tweaks], optional): A dictionary of tweaks to customize the flow execution. The tweaks can be used to modify the flow's parameters and components. Tweaks can be overridden by the input values. + - `stream` (bool, optional): Specifies whether the results should be streamed. Defaults to False. + - `session_id` (Union[None, str], optional): An optional session ID to utilize existing session data for the flow execution. + - `api_key_user` (User): The user associated with the current API key. Automatically resolved from the API key. + - `session_service` (SessionService): The session service object for managing flow sessions. + + ### Returns: + A `RunResponse` object containing the selected outputs (or all if not specified) of the executed flow and the session ID. 
The structure of the response accommodates multiple inputs, providing a nested list of outputs for each input. + + ### Raises: + HTTPException: Indicates issues with finding the specified flow, invalid input formats, or internal errors during flow execution. + + ### Example usage: + ```json + POST /run/{flow_id} + x-api-key: YOUR_API_KEY + Payload: + { + "inputs": [ + {"components": ["component1"], "input_value": "value1"}, + {"components": ["component3"], "input_value": "value2"} + ], + "outputs": ["Component Name", "component_id"], + "tweaks": {"parameter_name": "value", "Component Name": {"parameter_name": "value"}, "component_id": {"parameter_name": "value"}} + "stream": false + } + ``` + + This endpoint facilitates complex flow executions with customized inputs, outputs, and configurations, catering to diverse application requirements. + """ + try: + if outputs is None: + outputs = [] + + task_result: List[RunOutputs] = [] + artifacts = {} + if session_id: + session_data = await session_service.load_session(session_id, flow_id=flow_id) + graph, artifacts = session_data if session_data else (None, None) + if graph is None: + raise ValueError(f"Session {session_id} not found") + else: + # Get the flow that matches the flow_id and belongs to the user + # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() + flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() + if flow is None: + raise ValueError(f"Flow {flow_id} not found") + + if flow.data is None: + raise ValueError(f"Flow {flow_id} has no data") + graph_data = flow.data + graph_data = process_tweaks(graph_data, tweaks or {}) + graph = Graph.from_payload(graph_data, flow_id=flow_id) + task_result, session_id = await run_graph_internal( + graph=graph, + flow_id=flow_id, + session_id=session_id, + inputs=inputs, + outputs=outputs, + artifacts=artifacts, + session_service=session_service, + stream=stream, + ) + 
+ return RunResponse(outputs=task_result, session_id=session_id) + except sa.exc.StatementError as exc: + # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') + if "badly formed hexadecimal UUID string" in str(exc): + logger.error(f"Flow ID {flow_id} is not a valid UUID") + # This means the Flow ID is not a valid UUID which means it can't find the flow + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + except ValueError as exc: + if f"Flow {flow_id} not found" in str(exc): + logger.error(f"Flow {flow_id} not found") + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + elif f"Session {session_id} not found" in str(exc): + logger.error(f"Session {session_id} not found") + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc + else: + logger.exception(exc) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc + + +@router.post( + "/predict/{flow_id}", + response_model=ProcessResponse, + dependencies=[Depends(api_key_security)], +) +@router.post( + "/process/{flow_id}", + response_model=ProcessResponse, +) +async def process( + session: Annotated[Session, Depends(get_session)], + flow_id: str, + inputs: Optional[Union[List[dict], dict]] = None, + tweaks: Optional[dict] = None, + clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821 + session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 + task_service: "TaskService" = Depends(get_task_service), + api_key_user: User = Depends(api_key_security), + sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821 + session_service: SessionService = Depends(get_session_service), +): + """ + Endpoint to process an input with a given flow_id. 
+ """ + # Raise a deprecation warning + logger.warning( + "The /process endpoint is deprecated and will be removed in a future version. " "Please use /run instead." + ) + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="The /process endpoint is deprecated and will be removed in a future version. " + "Please use /run instead.", + ) + + +@router.get("/task/{task_id}", response_model=TaskStatusResponse) +async def get_task_status(task_id: str): + task_service = get_task_service() + task = task_service.get_task(task_id) + result = None + if task is None: + raise HTTPException(status_code=404, detail="Task not found") + if task.ready(): + result = task.result + # If the result is an Exception, log its traceback for debugging + if isinstance(result, Exception): + logger.exception(task.traceback) + + if isinstance(result, dict) and "result" in result: + result = result["result"] + elif hasattr(result, "result"): + result = result.result + + if task.status == "FAILURE": + result = str(task.result) + logger.error(f"Task {task_id} failed: {task.traceback}") + + return TaskStatusResponse(status=task.status, result=result) + + +@router.post( + "/upload/{flow_id}", + response_model=UploadFileResponse, + status_code=HTTPStatus.CREATED, +) +async def create_upload_file( + file: UploadFile, + flow_id: str, +): + try: + file_path = save_uploaded_file(file, folder_name=flow_id) + + return UploadFileResponse( + flowId=flow_id, + file_path=file_path, + ) + except Exception as exc: + logger.error(f"Error saving file: {exc}") + raise HTTPException(status_code=500, detail=str(exc)) from exc + + +# GET endpoint that returns the installed Langflow version and package name +@router.get("/version") +def get_version(): + try: + from langflow.version import __version__ + + version = __version__ + package = "Langflow" + except ImportError: + from importlib import metadata + + version = metadata.version("langflow-base") + package = "Langflow Base" + return {"version": version, "package": package} + + 
+@router.post("/custom_component", status_code=HTTPStatus.OK) +async def custom_component( + raw_code: CustomComponentRequest, + user: User = Depends(get_current_active_user), +): + component = CustomComponent(code=raw_code.code) + + built_frontend_node, _ = build_custom_component_template(component, user_id=user.id) + + built_frontend_node = update_frontend_node_with_template_values(built_frontend_node, raw_code.frontend_node) + return built_frontend_node + + +@router.post("/custom_component/reload", status_code=HTTPStatus.OK) +async def reload_custom_component(path: str, user: User = Depends(get_current_active_user)): + from langflow.interface.custom.utils import build_custom_component_template + + try: + reader = DirectoryReader("") + valid, content = reader.process_file(path) + if not valid: + raise ValueError(content) + + extractor = CustomComponent(code=content) + frontend_node, _ = build_custom_component_template(extractor, user_id=user.id) + return frontend_node + except Exception as exc: + raise HTTPException(status_code=400, detail=str(exc)) + + +@router.post("/custom_component/update", status_code=HTTPStatus.OK) +async def custom_component_update( + code_request: UpdateCustomComponentRequest, + user: User = Depends(get_current_active_user), +): + """ + Update a custom component with the provided code request. + + This endpoint generates the CustomComponentFrontendNode normally but then runs the `update_build_config` method + on the latest version of the template. This ensures that every time it runs, it has the latest version of the template. + + Args: + code_request (CustomComponentRequest): The code request containing the updated code for the custom component. + user (User, optional): The user making the request. Defaults to the current active user. + + Returns: + dict: The updated custom component node. 
+ + """ + try: + component = CustomComponent(code=code_request.code) + + component_node, cc_instance = build_custom_component_template( + component, + user_id=user.id, + ) + + updated_build_config = cc_instance.update_build_config( + build_config=code_request.get_template(), + field_value=code_request.field_value, + field_name=code_request.field, + ) + component_node["template"] = updated_build_config + + return component_node + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=400, detail=str(exc)) from exc diff --git a/src/backend/base/langflow/api/v1/files.py b/src/backend/base/langflow/api/v1/files.py new file mode 100644 index 000000000..435aea826 --- /dev/null +++ b/src/backend/base/langflow/api/v1/files.py @@ -0,0 +1,116 @@ +import hashlib +from http import HTTPStatus +from io import BytesIO + +from fastapi import APIRouter, Depends, HTTPException, UploadFile +from fastapi.responses import StreamingResponse + + +from langflow.api.v1.schemas import UploadFileResponse +from langflow.services.auth.utils import get_current_active_user +from langflow.services.database.models.flow import Flow +from langflow.services.deps import get_session, get_storage_service +from langflow.services.storage.service import StorageService +from langflow.services.storage.utils import build_content_type_from_extension + +router = APIRouter(tags=["Files"], prefix="/files") + + +# Create dep that gets the flow_id from the request +# then finds it in the database and returns it while +# using the current user as the owner +def get_flow_id( + flow_id: str, + current_user=Depends(get_current_active_user), + session=Depends(get_session), +): + # AttributeError: 'SelectOfScalar' object has no attribute 'first' + flow = session.get(Flow, flow_id) + if not flow: + raise HTTPException(status_code=404, detail="Flow not found") + if flow.user_id != current_user.id: + raise HTTPException(status_code=403, detail="You don't have access to this flow") + return 
flow_id + + +@router.post("/upload/{flow_id}", status_code=HTTPStatus.CREATED) +async def upload_file( + file: UploadFile, + flow_id: str = Depends(get_flow_id), + storage_service: StorageService = Depends(get_storage_service), +): + try: + file_content = await file.read() + file_name = file.filename or hashlib.sha256(file_content).hexdigest() + folder = flow_id + await storage_service.save_file(flow_id=folder, file_name=file_name, data=file_content) + return UploadFileResponse(flowId=flow_id, file_path=f"{folder}/{file_name}") + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/download/{flow_id}/{file_name}") +async def download_file(file_name: str, flow_id: str, storage_service: StorageService = Depends(get_storage_service)): + try: + extension = file_name.split(".")[-1] + + if not extension: + raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") + + content_type = build_content_type_from_extension(extension) + + if not content_type: + raise HTTPException(status_code=500, detail=f"Content type not found for extension {extension}") + + file_content = await storage_service.get_file(flow_id=flow_id, file_name=file_name) + headers = { + "Content-Disposition": f"attachment; filename={file_name} filename*=UTF-8''{file_name}", + "Content-Type": "application/octet-stream", + "Content-Length": str(len(file_content)), + } + return StreamingResponse(BytesIO(file_content), media_type=content_type, headers=headers) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/images/{flow_id}/{file_name}") +async def download_image(file_name: str, flow_id: str, storage_service: StorageService = Depends(get_storage_service)): + try: + extension = file_name.split(".")[-1] + + if not extension: + raise HTTPException(status_code=500, detail=f"Extension not found for file {file_name}") + + content_type = build_content_type_from_extension(extension) + + if not 
content_type: + raise HTTPException(status_code=500, detail=f"Content type not found for extension {extension}") + elif not content_type.startswith("image"): + raise HTTPException(status_code=500, detail=f"Content type {content_type} is not an image") + + file_content = await storage_service.get_file(flow_id=flow_id, file_name=file_name) + return StreamingResponse(BytesIO(file_content), media_type=content_type) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/list/{flow_id}") +async def list_files( + flow_id: str = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) +): + try: + files = await storage_service.list_files(flow_id=flow_id) + return {"files": files} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.delete("/delete/{flow_id}/{file_name}") +async def delete_file( + file_name: str, flow_id: str = Depends(get_flow_id), storage_service: StorageService = Depends(get_storage_service) +): + try: + await storage_service.delete_file(flow_id=flow_id, file_name=file_name) + return {"message": f"File {file_name} deleted successfully"} + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) diff --git a/src/backend/langflow/api/v1/flows.py b/src/backend/base/langflow/api/v1/flows.py similarity index 67% rename from src/backend/langflow/api/v1/flows.py rename to src/backend/base/langflow/api/v1/flows.py index 517ff33c1..bd055f120 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/base/langflow/api/v1/flows.py @@ -5,14 +5,17 @@ from uuid import UUID import orjson from fastapi import APIRouter, Depends, File, HTTPException, UploadFile from fastapi.encoders import jsonable_encoder +from loguru import logger from sqlmodel import Session, select from langflow.api.utils import remove_api_keys, validate_is_component from langflow.api.v1.schemas import FlowListCreate, FlowListRead +from langflow.initial_setup.setup 
import STARTER_FOLDER_NAME from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate from langflow.services.database.models.user.model import User from langflow.services.deps import get_session, get_settings_service +from langflow.services.settings.service import SettingsService # build router router = APIRouter(prefix="/flows", tags=["Flows"]) @@ -42,11 +45,36 @@ def create_flow( def read_flows( *, current_user: User = Depends(get_current_active_user), + session: Session = Depends(get_session), + settings_service: "SettingsService" = Depends(get_settings_service), ): """Read all flows.""" try: - flows = current_user.flows - flows = validate_is_component(flows) + auth_settings = settings_service.auth_settings + if auth_settings.AUTO_LOGIN: + flows = session.exec( + select(Flow).where( + (Flow.user_id == None) | (Flow.user_id == current_user.id) # noqa + ) + ).all() + else: + flows = current_user.flows + + flows = validate_is_component(flows) # type: ignore + flow_ids = [flow.id for flow in flows] + # with the session get the flows that DO NOT have a user_id + try: + example_flows = session.exec( + select(Flow).where( + Flow.user_id == None, # noqa + Flow.folder == STARTER_FOLDER_NAME, + ) + ).all() + for example_flow in example_flows: + if example_flow.id not in flow_ids: + flows.append(example_flow) # type: ignore + except Exception as e: + logger.error(e) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e return [jsonable_encoder(flow) for flow in flows] @@ -58,9 +86,18 @@ def read_flow( session: Session = Depends(get_session), flow_id: UUID, current_user: User = Depends(get_current_active_user), + settings_service: "SettingsService" = Depends(get_settings_service), ): """Read a flow.""" - if user_flow := (session.exec(select(Flow).where(Flow.id == flow_id, Flow.user_id == current_user.id)).first()): + auth_settings = 
settings_service.auth_settings + stmt = select(Flow).where(Flow.id == flow_id) + if auth_settings.AUTO_LOGIN: + # If auto login is enable user_id can be current_user.id or None + # so write an OR + stmt = stmt.where( + (Flow.user_id == current_user.id) | (Flow.user_id == None) # noqa + ) # noqa + if user_flow := session.exec(stmt).first(): return user_flow else: raise HTTPException(status_code=404, detail="Flow not found") @@ -77,7 +114,12 @@ def update_flow( ): """Update a flow.""" - db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user) + db_flow = read_flow( + session=session, + flow_id=flow_id, + current_user=current_user, + settings_service=settings_service, + ) if not db_flow: raise HTTPException(status_code=404, detail="Flow not found") flow_data = flow.model_dump(exclude_unset=True) @@ -99,9 +141,15 @@ def delete_flow( session: Session = Depends(get_session), flow_id: UUID, current_user: User = Depends(get_current_active_user), + settings_service=Depends(get_settings_service), ): """Delete a flow.""" - flow = read_flow(session=session, flow_id=flow_id, current_user=current_user) + flow = read_flow( + session=session, + flow_id=flow_id, + current_user=current_user, + settings_service=settings_service, + ) if not flow: raise HTTPException(status_code=404, detail="Flow not found") session.delete(flow) @@ -109,9 +157,6 @@ def delete_flow( return {"message": "Flow deleted successfully"} -# Define a new model to handle multiple flows - - @router.post("/batch/", response_model=List[FlowRead], status_code=201) def create_flows( *, @@ -157,8 +202,9 @@ async def upload_file( async def download_file( *, session: Session = Depends(get_session), + settings_service: "SettingsService" = Depends(get_settings_service), current_user: User = Depends(get_current_active_user), ): """Download all flows as a file.""" - flows = read_flows(current_user=current_user) + flows = read_flows(current_user=current_user, session=session, 
settings_service=settings_service) return FlowListRead(flows=flows) diff --git a/src/backend/langflow/api/v1/login.py b/src/backend/base/langflow/api/v1/login.py similarity index 88% rename from src/backend/langflow/api/v1/login.py rename to src/backend/base/langflow/api/v1/login.py index b0946fd0a..fba61d3b4 100644 --- a/src/backend/langflow/api/v1/login.py +++ b/src/backend/base/langflow/api/v1/login.py @@ -10,6 +10,7 @@ from langflow.services.auth.utils import ( create_user_tokens, ) from langflow.services.deps import get_session, get_settings_service +from langflow.services.settings.manager import SettingsService router = APIRouter(tags=["Login"]) @@ -41,6 +42,7 @@ async def login_to_get_access_token( httponly=auth_settings.REFRESH_HTTPONLY, samesite=auth_settings.REFRESH_SAME_SITE, secure=auth_settings.REFRESH_SECURE, + expires=auth_settings.REFRESH_TOKEN_EXPIRE_SECONDS, ) response.set_cookie( "access_token_lf", @@ -48,6 +50,7 @@ async def login_to_get_access_token( httponly=auth_settings.ACCESS_HTTPONLY, samesite=auth_settings.ACCESS_SAME_SITE, secure=auth_settings.ACCESS_SECURE, + expires=auth_settings.ACCESS_TOKEN_EXPIRE_SECONDS, ) return tokens else: @@ -73,6 +76,7 @@ async def auto_login( httponly=auth_settings.ACCESS_HTTPONLY, samesite=auth_settings.ACCESS_SAME_SITE, secure=auth_settings.ACCESS_SECURE, + expires=None, # Set to None to make it a session cookie ) return tokens @@ -87,7 +91,7 @@ async def auto_login( @router.post("/refresh") async def refresh_token( - request: Request, response: Response, settings_service=Depends(get_settings_service) + request: Request, response: Response, settings_service: "SettingsService" = Depends(get_settings_service) ): auth_settings = settings_service.auth_settings @@ -101,6 +105,7 @@ async def refresh_token( httponly=auth_settings.REFRESH_HTTPONLY, samesite=auth_settings.REFRESH_SAME_SITE, secure=auth_settings.REFRESH_SECURE, + expires=auth_settings.REFRESH_TOKEN_EXPIRE_SECONDS, ) response.set_cookie( 
"access_token_lf", @@ -108,6 +113,7 @@ async def refresh_token( httponly=auth_settings.ACCESS_HTTPONLY, samesite=auth_settings.ACCESS_SAME_SITE, secure=auth_settings.ACCESS_SECURE, + expires=auth_settings.ACCESS_TOKEN_EXPIRE_SECONDS, ) return tokens else: diff --git a/src/backend/base/langflow/api/v1/monitor.py b/src/backend/base/langflow/api/v1/monitor.py new file mode 100644 index 000000000..dbefd1c3b --- /dev/null +++ b/src/backend/base/langflow/api/v1/monitor.py @@ -0,0 +1,73 @@ +from typing import Optional + +from fastapi import APIRouter, Depends, HTTPException, Query + + +from langflow.services.deps import get_monitor_service +from langflow.services.monitor.schema import VertexBuildMapModel +from langflow.services.monitor.service import MonitorService + +router = APIRouter(prefix="/monitor", tags=["Monitor"]) + + +# Get vertex_builds data from the monitor service +@router.get("/builds", response_model=VertexBuildMapModel) +async def get_vertex_builds( + flow_id: Optional[str] = Query(None), + vertex_id: Optional[str] = Query(None), + valid: Optional[bool] = Query(None), + order_by: Optional[str] = Query("timestamp"), + monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + vertex_build_dicts = monitor_service.get_vertex_builds( + flow_id=flow_id, vertex_id=vertex_id, valid=valid, order_by=order_by + ) + vertex_build_map = VertexBuildMapModel.from_list_of_dicts(vertex_build_dicts) + return vertex_build_map + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.delete("/builds", status_code=204) +async def delete_vertex_builds( + flow_id: Optional[str] = Query(None), + monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + monitor_service.delete_vertex_builds(flow_id=flow_id) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/messages") +async def get_messages( + session_id: Optional[str] = Query(None), + sender: Optional[str] = 
Query(None), + sender_name: Optional[str] = Query(None), + order_by: Optional[str] = Query("timestamp"), + monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + return monitor_service.get_messages( + sender=sender, + sender_name=sender_name, + session_id=session_id, + order_by=order_by, + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/transactions") +async def get_transactions( + source: Optional[str] = Query(None), + target: Optional[str] = Query(None), + status: Optional[str] = Query(None), + order_by: Optional[str] = Query("timestamp"), + monitor_service: MonitorService = Depends(get_monitor_service), +): + try: + return monitor_service.get_transactions(source=source, target=target, status=status, order_by=order_by) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) diff --git a/src/backend/base/langflow/api/v1/schemas.py b/src/backend/base/langflow/api/v1/schemas.py new file mode 100644 index 000000000..631c37fbb --- /dev/null +++ b/src/backend/base/langflow/api/v1/schemas.py @@ -0,0 +1,325 @@ +from datetime import datetime +from enum import Enum +from pathlib import Path +from typing import Any, Dict, List, Optional, Union +from uuid import UUID + +from pydantic import BaseModel, ConfigDict, Field, RootModel, field_validator, model_serializer + +from langflow.graph.schema import RunOutputs +from langflow.schema import dotdict +from langflow.schema.schema import InputType, OutputType +from langflow.services.database.models.api_key.model import ApiKeyRead +from langflow.services.database.models.base import orjson_dumps +from langflow.services.database.models.flow import FlowCreate, FlowRead +from langflow.services.database.models.user import UserRead + + +class BuildStatus(Enum): + """Status of the build.""" + + SUCCESS = "success" + FAILURE = "failure" + STARTED = "started" + IN_PROGRESS = "in_progress" + + +class TweaksRequest(BaseModel): + tweaks: 
Optional[Dict[str, Dict[str, str]]] = Field(default_factory=dict) + + +class UpdateTemplateRequest(BaseModel): + template: dict + + +class TaskResponse(BaseModel): + """Task response schema.""" + + id: Optional[str] = Field(None) + href: Optional[str] = Field(None) + + +class ProcessResponse(BaseModel): + """Process response schema.""" + + result: Any + status: Optional[str] = None + task: Optional[TaskResponse] = None + session_id: Optional[str] = None + backend: Optional[str] = None + + +class RunResponse(BaseModel): + """Run response schema.""" + + outputs: Optional[List[RunOutputs]] = [] + session_id: Optional[str] = None + + @model_serializer(mode="plain") + def serialize(self): + # Serialize all the outputs if they are base models + serialized = {"session_id": self.session_id, "outputs": []} + if self.outputs: + serialized_outputs = [] + for output in self.outputs: + if isinstance(output, BaseModel) and not isinstance(output, RunOutputs): + serialized_outputs.append(output.model_dump(exclude_none=True)) + else: + serialized_outputs.append(output) + serialized["outputs"] = serialized_outputs + return serialized + + +class PreloadResponse(BaseModel): + """Preload response schema.""" + + session_id: Optional[str] = None + is_clear: Optional[bool] = None + + +class TaskStatusResponse(BaseModel): + """Task status response schema.""" + + status: str + result: Optional[Any] = None + + +class ChatMessage(BaseModel): + """Chat message schema.""" + + is_bot: bool = False + message: Union[str, None, dict] = None + chatKey: Optional[str] = None + type: str = "human" + + +class ChatResponse(ChatMessage): + """Chat response schema.""" + + intermediate_steps: str + + type: str + is_bot: bool = True + files: list = [] + + @field_validator("type") + @classmethod + def validate_message_type(cls, v): + if v not in ["start", "stream", "end", "error", "info", "file"]: + raise ValueError("type must be start, stream, end, error, info, or file") + return v + + +class 
PromptResponse(ChatMessage): + """Prompt response schema.""" + + prompt: str + type: str = "prompt" + is_bot: bool = True + + +class FileResponse(ChatMessage): + """File response schema.""" + + data: Any = None + data_type: str + type: str = "file" + is_bot: bool = True + + @field_validator("data_type") + @classmethod + def validate_data_type(cls, v): + if v not in ["image", "csv"]: + raise ValueError("data_type must be image or csv") + return v + + +class FlowListCreate(BaseModel): + flows: List[FlowCreate] + + +class FlowListRead(BaseModel): + flows: List[FlowRead] + + +class InitResponse(BaseModel): + flowId: str + + +class BuiltResponse(BaseModel): + built: bool + + +class UploadFileResponse(BaseModel): + """Upload file response schema.""" + + flowId: str + file_path: Path + + +class StreamData(BaseModel): + event: str + data: dict + + def __str__(self) -> str: + return f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n" + + +class CustomComponentRequest(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + code: str + frontend_node: Optional[dict] = None + + +class UpdateCustomComponentRequest(CustomComponentRequest): + field: str + field_value: Optional[Union[str, int, float, bool, dict, list]] = None + template: dict + + def get_template(self): + return dotdict(self.template) + + +class CustomComponentResponseError(BaseModel): + detail: str + traceback: str + + +class ComponentListCreate(BaseModel): + flows: List[FlowCreate] + + +class ComponentListRead(BaseModel): + flows: List[FlowRead] + + +class UsersResponse(BaseModel): + total_count: int + users: List[UserRead] + + +class ApiKeyResponse(BaseModel): + id: str + api_key: str + name: str + created_at: str + last_used_at: str + + +class ApiKeysResponse(BaseModel): + total_count: int + user_id: UUID + api_keys: List[ApiKeyRead] + + +class CreateApiKeyRequest(BaseModel): + name: str + + +class Token(BaseModel): + access_token: str + refresh_token: str + token_type: 
str + + +class ApiKeyCreateRequest(BaseModel): + api_key: str + + +class VerticesOrderResponse(BaseModel): + ids: List[str] + run_id: UUID + vertices_to_run: List[str] + + +class ResultDataResponse(BaseModel): + results: Optional[Any] = Field(default_factory=dict) + artifacts: Optional[Any] = Field(default_factory=dict) + timedelta: Optional[float] = None + duration: Optional[str] = None + + +class VertexBuildResponse(BaseModel): + id: Optional[str] = None + inactivated_vertices: Optional[List[str]] = None + next_vertices_ids: Optional[List[str]] = None + top_level_vertices: Optional[List[str]] = None + valid: bool + params: Optional[Any] = Field(default_factory=dict) + """JSON string of the params.""" + data: ResultDataResponse + """Mapping of vertex ids to result dict containing the param name and result value.""" + timestamp: Optional[datetime] = Field(default_factory=datetime.utcnow) + """Timestamp of the build.""" + + +class VerticesBuiltResponse(BaseModel): + vertices: List[VertexBuildResponse] + + +class InputValueRequest(BaseModel): + components: Optional[List[str]] = [] + input_value: Optional[str] = None + type: Optional[InputType] = Field( + "any", + description="Defines on which components the input value should be applied. 'any' applies to all input components.", + ) + + # add an example + model_config = ConfigDict( + json_schema_extra={ + "examples": [ + { + "components": ["components_id", "Component Name"], + "input_value": "input_value", + }, + {"components": ["Component Name"], "input_value": "input_value"}, + {"input_value": "input_value"}, + {"type": "chat", "input_value": "input_value"}, + {"type": "json", "input_value": '{"key": "value"}'}, + ] + }, + extra="forbid", + ) + + +class Tweaks(RootModel): + root: dict[str, Union[str, dict[str, str]]] = Field( + description="A dictionary of tweaks to adjust the flow's execution. Allows customizing flow behavior dynamically. 
All tweaks are overridden by the input values.", + ) + model_config = { + "json_schema_extra": { + "examples": [ + { + "parameter_name": "value", + "Component Name": {"parameter_name": "value"}, + "component_id": {"parameter_name": "value"}, + } + ] + } + } + + # This should behave like a dict + def __getitem__(self, key): + return self.root[key] + + def __setitem__(self, key, value): + self.root[key] = value + + def __delitem__(self, key): + del self.root[key] + + def items(self): + return self.root.items() + + +class SimplifiedAPIRequest(BaseModel): + input_value: Optional[str] = Field(default="", description="The input value") + input_type: Optional[InputType] = Field(default="chat", description="The input type") + output_type: Optional[OutputType] = Field(default="chat", description="The output type") + output_component: Optional[str] = Field( + default="", + description="If there are multiple output components, you can specify the component to get the output from.", + ) + tweaks: Optional[Tweaks] = Field(default=None, description="The tweaks") + session_id: Optional[str] = Field(default=None, description="The session id") diff --git a/src/backend/langflow/api/v1/store.py b/src/backend/base/langflow/api/v1/store.py similarity index 100% rename from src/backend/langflow/api/v1/store.py rename to src/backend/base/langflow/api/v1/store.py diff --git a/src/backend/langflow/api/v1/users.py b/src/backend/base/langflow/api/v1/users.py similarity index 100% rename from src/backend/langflow/api/v1/users.py rename to src/backend/base/langflow/api/v1/users.py diff --git a/src/backend/base/langflow/api/v1/validate.py b/src/backend/base/langflow/api/v1/validate.py new file mode 100644 index 000000000..97841bca6 --- /dev/null +++ b/src/backend/base/langflow/api/v1/validate.py @@ -0,0 +1,81 @@ +from collections import defaultdict + +from fastapi import APIRouter, HTTPException +from loguru import logger + +from langflow.api.v1.base import Code, CodeValidationResponse, 
PromptValidationResponse, ValidatePromptRequest +from langflow.base.prompts.utils import ( + add_new_variables_to_template, + get_old_custom_fields, + remove_old_variables_from_template, + update_input_variables_field, + validate_prompt, +) +from langflow.utils.validate import validate_code + +# build router +router = APIRouter(prefix="/validate", tags=["Validate"]) + + +@router.post("/code", status_code=200, response_model=CodeValidationResponse) +def post_validate_code(code: Code): + try: + errors = validate_code(code.code) + return CodeValidationResponse( + imports=errors.get("imports", {}), + function=errors.get("function", {}), + ) + except Exception as e: + return HTTPException(status_code=500, detail=str(e)) + + +@router.post("/prompt", status_code=200, response_model=PromptValidationResponse) +def post_validate_prompt(prompt_request: ValidatePromptRequest): + try: + input_variables = validate_prompt(prompt_request.template) + # Check if frontend_node is None before proceeding to avoid attempting to update a non-existent node. 
+ if prompt_request.frontend_node is None: + return PromptValidationResponse( + input_variables=input_variables, + frontend_node=None, + ) + if not prompt_request.frontend_node.custom_fields: + prompt_request.frontend_node.custom_fields = defaultdict(list) + old_custom_fields = get_old_custom_fields(prompt_request.frontend_node.custom_fields, prompt_request.name) + + add_new_variables_to_template( + input_variables, + prompt_request.frontend_node.custom_fields, + prompt_request.frontend_node.template, + prompt_request.name, + ) + + remove_old_variables_from_template( + old_custom_fields, + input_variables, + prompt_request.frontend_node.custom_fields, + prompt_request.frontend_node.template, + prompt_request.name, + ) + + update_input_variables_field(input_variables, prompt_request.frontend_node.template) + + # If frontend_node.template contains only one field that is type == 'prompt', then we can remove all fields that are not + # 'code', and not in the input_variables list. + prompt_fields = [ + key + for key, field in prompt_request.frontend_node.template.items() + if isinstance(field, dict) and field["type"] == "prompt" + ] + + if len(prompt_fields) == 1: + for key, field in prompt_request.frontend_node.template.copy().items(): + if isinstance(field, dict) and field["type"] != "code" and key not in input_variables + prompt_fields: + del prompt_request.frontend_node.template[key] + return PromptValidationResponse( + input_variables=input_variables, + frontend_node=prompt_request.frontend_node, + ) + except Exception as e: + logger.exception(e) + raise HTTPException(status_code=500, detail=str(e)) from e diff --git a/src/backend/base/langflow/api/v1/variable.py b/src/backend/base/langflow/api/v1/variable.py new file mode 100644 index 000000000..123086676 --- /dev/null +++ b/src/backend/base/langflow/api/v1/variable.py @@ -0,0 +1,113 @@ +from datetime import datetime +from uuid import UUID + +from fastapi import APIRouter, Depends, HTTPException +from sqlmodel 
import Session, select + +from langflow.services.auth import utils as auth_utils +from langflow.services.auth.utils import get_current_active_user +from langflow.services.database.models.user.model import User +from langflow.services.database.models.variable import Variable, VariableCreate, VariableRead, VariableUpdate +from langflow.services.deps import get_session, get_settings_service + +router = APIRouter(prefix="/variables", tags=["Variables"]) + + +@router.post("/", response_model=VariableRead, status_code=201) +def create_variable( + *, + session: Session = Depends(get_session), + variable: VariableCreate, + current_user: User = Depends(get_current_active_user), + settings_service=Depends(get_settings_service), +): + """Create a new variable.""" + try: + # check if variable name already exists + variable_exists = session.exec( + select(Variable).where( + Variable.name == variable.name, + Variable.user_id == current_user.id, + ) + ).first() + if variable_exists: + raise HTTPException(status_code=400, detail="Variable name already exists") + + variable_dict = variable.model_dump() + variable_dict["user_id"] = current_user.id + + db_variable = Variable.model_validate(variable_dict) + if not db_variable.value: + raise HTTPException(status_code=400, detail="Variable value cannot be empty") + encrypted = auth_utils.encrypt_api_key(db_variable.value, settings_service=settings_service) + db_variable.value = encrypted + db_variable.user_id = current_user.id + session.add(db_variable) + session.commit() + session.refresh(db_variable) + return db_variable + except Exception as e: + if isinstance(e, HTTPException): + raise e + raise HTTPException(status_code=500, detail=str(e)) from e + + +@router.get("/", response_model=list[VariableRead], status_code=200) +def read_variables( + *, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_active_user), +): + """Read all variables.""" + try: + variables = 
session.exec(select(Variable).where(Variable.user_id == current_user.id)).all() + return variables + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + +@router.patch("/{variable_id}", response_model=VariableRead, status_code=200) +def update_variable( + *, + session: Session = Depends(get_session), + variable_id: UUID, + variable: VariableUpdate, + current_user: User = Depends(get_current_active_user), +): + """Update a variable.""" + try: + db_variable = session.exec( + select(Variable).where(Variable.id == variable_id, Variable.user_id == current_user.id) + ).first() + if not db_variable: + raise HTTPException(status_code=404, detail="Variable not found") + + variable_data = variable.model_dump(exclude_unset=True) + for key, value in variable_data.items(): + setattr(db_variable, key, value) + db_variable.updated_at = datetime.utcnow() + session.commit() + session.refresh(db_variable) + return db_variable + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + +@router.delete("/{variable_id}", status_code=204) +def delete_variable( + *, + session: Session = Depends(get_session), + variable_id: UUID, + current_user: User = Depends(get_current_active_user), +): + """Delete a variable.""" + try: + db_variable = session.exec( + select(Variable).where(Variable.id == variable_id, Variable.user_id == current_user.id) + ).first() + if not db_variable: + raise HTTPException(status_code=404, detail="Variable not found") + session.delete(db_variable) + session.commit() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e diff --git a/src/backend/langflow/interface/embeddings/__init__.py b/src/backend/base/langflow/base/__init__.py similarity index 100% rename from src/backend/langflow/interface/embeddings/__init__.py rename to src/backend/base/langflow/base/__init__.py diff --git a/src/backend/langflow/interface/initialize/__init__.py 
b/src/backend/base/langflow/base/agents/__init__.py similarity index 100% rename from src/backend/langflow/interface/initialize/__init__.py rename to src/backend/base/langflow/base/agents/__init__.py diff --git a/src/backend/base/langflow/base/agents/agent.py b/src/backend/base/langflow/base/agents/agent.py new file mode 100644 index 000000000..5ef498b66 --- /dev/null +++ b/src/backend/base/langflow/base/agents/agent.py @@ -0,0 +1,75 @@ +from typing import List, Optional, Union, cast + +from langchain.agents import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent +from langchain_core.runnables import Runnable + +from langflow.custom import CustomComponent +from langflow.field_typing import BaseMemory, Text, Tool + + +class LCAgentComponent(CustomComponent): + def build_config(self): + return { + "lc": { + "display_name": "LangChain", + "info": "The LangChain to interact with.", + }, + "handle_parsing_errors": { + "display_name": "Handle Parsing Errors", + "info": "If True, the agent will handle parsing errors. 
If False, the agent will raise an error.", + "advanced": True, + }, + "output_key": { + "display_name": "Output Key", + "info": "The key to use to get the output from the agent.", + "advanced": True, + }, + "memory": { + "display_name": "Memory", + "info": "Memory to use for the agent.", + }, + "tools": { + "display_name": "Tools", + "info": "Tools the agent can use.", + }, + "input_value": { + "display_name": "Input", + "info": "Input text to pass to the agent.", + }, + } + + async def run_agent( + self, + agent: Union[Runnable, BaseSingleActionAgent, BaseMultiActionAgent, AgentExecutor], + inputs: str, + input_variables: list[str], + tools: List[Tool], + memory: Optional[BaseMemory] = None, + handle_parsing_errors: bool = True, + output_key: str = "output", + ) -> Text: + if isinstance(agent, AgentExecutor): + runnable = agent + else: + runnable = AgentExecutor.from_agent_and_tools( + agent=agent, # type: ignore + tools=tools, + verbose=True, + memory=memory, + handle_parsing_errors=handle_parsing_errors, + ) + input_dict = {"input": inputs} + for var in input_variables: + if var not in ["agent_scratchpad", "input"]: + input_dict[var] = "" + result = await runnable.ainvoke(input_dict) + self.status = result + if output_key in result: + return cast(str, result.get(output_key)) + elif "output" not in result: + if output_key != "output": + raise ValueError(f"Output key not found in result. Tried '{output_key}' and 'output'.") + else: + raise ValueError("Output key not found in result. Tried 'output'.") + + return cast(str, result.get("output")) diff --git a/src/backend/base/langflow/base/constants.py b/src/backend/base/langflow/base/constants.py new file mode 100644 index 000000000..a54d6eff5 --- /dev/null +++ b/src/backend/base/langflow/base/constants.py @@ -0,0 +1,27 @@ +""" +This module contains constants used in the Langflow base module. + +Constants: +- STREAM_INFO_TEXT: A string representing the information about streaming the response from the model. 
+- NODE_FORMAT_ATTRIBUTES: A list of attributes used for formatting nodes. +- FIELD_FORMAT_ATTRIBUTES: A list of attributes used for formatting fields. +""" +STREAM_INFO_TEXT = "Stream the response from the model. Streaming works only in Chat." + +NODE_FORMAT_ATTRIBUTES = ["beta", "icon", "display_name", "description"] + + +FIELD_FORMAT_ATTRIBUTES = [ + "info", + "display_name", + "required", + "list", + "multiline", + "fileTypes", + "password", + "input_types", + "title_case", + "real_time_refresh", + "refresh_button", + "refresh_button_text", +] diff --git a/src/backend/langflow/interface/output_parsers/__init__.py b/src/backend/base/langflow/base/data/__init__.py similarity index 100% rename from src/backend/langflow/interface/output_parsers/__init__.py rename to src/backend/base/langflow/base/data/__init__.py diff --git a/src/backend/base/langflow/base/data/utils.py b/src/backend/base/langflow/base/data/utils.py new file mode 100644 index 000000000..c3fda5e34 --- /dev/null +++ b/src/backend/base/langflow/base/data/utils.py @@ -0,0 +1,141 @@ +import json +import xml.etree.ElementTree as ET +from concurrent import futures +from pathlib import Path +from typing import Callable, List, Optional, Text + +import yaml + +from langflow.schema.schema import Record + +# Types of files that can be read simply by file.read() +# and have 100% to be completely readable +TEXT_FILE_TYPES = ["txt", "md", "mdx", "csv", "json", "yaml", "yml", "xml", "html", "htm", "pdf", "docx"] + + +def is_hidden(path: Path) -> bool: + return path.name.startswith(".") + + +def retrieve_file_paths( + path: str, + load_hidden: bool, + recursive: bool, + depth: int, + types: List[str] = TEXT_FILE_TYPES, +) -> List[str]: + path_obj = Path(path) + if not path_obj.exists() or not path_obj.is_dir(): + raise ValueError(f"Path {path} must exist and be a directory.") + + def match_types(p: Path) -> bool: + return any(p.suffix == f".{t}" for t in types) if types else True + + def is_not_hidden(p: Path) -> 
bool: + return not is_hidden(p) or load_hidden + + def walk_level(directory: Path, max_depth: int): + directory = directory.resolve() + prefix_length = len(directory.parts) + for p in directory.rglob("*" if recursive else "[!.]*"): + if len(p.parts) - prefix_length <= max_depth: + yield p + + glob = "**/*" if recursive else "*" + paths = walk_level(path_obj, depth) if depth else path_obj.glob(glob) + file_paths = [Text(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)] + + return file_paths + + +def partition_file_to_record(file_path: str, silent_errors: bool) -> Optional[Record]: + # Use the partition function to load the file + from unstructured.partition.auto import partition # type: ignore + + try: + elements = partition(file_path) + except Exception as e: + if not silent_errors: + raise ValueError(f"Error loading file {file_path}: {e}") from e + return None + + # Create a Record + text = "\n\n".join([Text(el) for el in elements]) + metadata = elements.metadata if hasattr(elements, "metadata") else {} + metadata["file_path"] = file_path + record = Record(text=text, data=metadata) + return record + + +def read_text_file(file_path: str) -> str: + with open(file_path, "r") as f: + return f.read() + + +def read_docx_file(file_path: str) -> str: + from docx import Document # type: ignore + + doc = Document(file_path) + return "\n\n".join([p.text for p in doc.paragraphs]) + + +def parse_pdf_to_text(file_path: str) -> str: + from pypdf import PdfReader # type: ignore + + with open(file_path, "rb") as f: + reader = PdfReader(f) + return "\n\n".join([page.extract_text() for page in reader.pages]) + + +def parse_text_file_to_record(file_path: str, silent_errors: bool) -> Optional[Record]: + try: + if file_path.endswith(".pdf"): + text = parse_pdf_to_text(file_path) + elif file_path.endswith(".docx"): + text = read_docx_file(file_path) + else: + text = read_text_file(file_path) + # if file is json, yaml, or xml, we can parse it + if 
file_path.endswith(".json"): + text = json.loads(text) + elif file_path.endswith(".yaml") or file_path.endswith(".yml"): + text = yaml.safe_load(text) + elif file_path.endswith(".xml"): + xml_element = ET.fromstring(text) + text = ET.tostring(xml_element, encoding="unicode") + except Exception as e: + if not silent_errors: + raise ValueError(f"Error loading file {file_path}: {e}") from e + return None + + record = Record(data={"file_path": file_path, "text": text}) + return record + + +def get_elements( + file_paths: List[str], + silent_errors: bool, + max_concurrency: int, + use_multithreading: bool, +) -> List[Optional[Record]]: + if use_multithreading: + records = parallel_load_records(file_paths, silent_errors, max_concurrency) + else: + records = [partition_file_to_record(file_path, silent_errors) for file_path in file_paths] + records = list(filter(None, records)) + return records + + +def parallel_load_records( + file_paths: List[str], + silent_errors: bool, + max_concurrency: int, + load_function: Callable = parse_text_file_to_record, +) -> List[Optional[Record]]: + with futures.ThreadPoolExecutor(max_workers=max_concurrency) as executor: + loaded_files = executor.map( + lambda file_path: load_function(file_path, silent_errors), + file_paths, + ) + # loaded_files is an iterator, so we need to convert it to a list + return list(loaded_files) diff --git a/src/backend/langflow/interface/retrievers/__init__.py b/src/backend/base/langflow/base/io/__init__.py similarity index 100% rename from src/backend/langflow/interface/retrievers/__init__.py rename to src/backend/base/langflow/base/io/__init__.py diff --git a/src/backend/base/langflow/base/io/chat.py b/src/backend/base/langflow/base/io/chat.py new file mode 100644 index 000000000..eb9787a79 --- /dev/null +++ b/src/backend/base/langflow/base/io/chat.py @@ -0,0 +1,118 @@ +import warnings +from typing import Optional, Union + +from langflow.field_typing import Text +from langflow.helpers.record import 
records_to_text +from langflow.interface.custom.custom_component import CustomComponent +from langflow.memory import add_messages +from langflow.schema import Record + + +class ChatComponent(CustomComponent): + display_name = "Chat Component" + description = "Use as base for chat components." + + def build_config(self): + return { + "input_value": { + "input_types": ["Text"], + "display_name": "Message", + "multiline": True, + }, + "sender": { + "options": ["Machine", "User"], + "display_name": "Sender Type", + "advanced": True, + }, + "sender_name": {"display_name": "Sender Name"}, + "session_id": { + "display_name": "Session ID", + "info": "If provided, the message will be stored in the memory.", + "advanced": True, + }, + "return_record": { + "display_name": "Return Record", + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "advanced": True, + }, + "record_template": { + "display_name": "Record Template", + "multiline": True, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "advanced": True, + }, + } + + def store_message( + self, + message: Union[str, Text, Record], + session_id: Optional[str] = None, + sender: Optional[str] = None, + sender_name: Optional[str] = None, + ) -> list[Record]: + if not message: + warnings.warn("No message provided.") + return [] + + if not session_id or not sender or not sender_name: + raise ValueError("All of session_id, sender, and sender_name must be provided.") + if isinstance(message, Record): + record = message + record.data.update( + { + "session_id": session_id, + "sender": sender, + "sender_name": sender_name, + } + ) + else: + record = Record( + data={ + "text": message, + "session_id": session_id, + "sender": sender, + "sender_name": sender_name, + }, + ) + + self.status = record + records = add_messages([record]) + return records[0] + + def build( + self, + sender: Optional[str] = "User", + sender_name: Optional[str] = 
"User", + input_value: Optional[Union[str, Record]] = None, + session_id: Optional[str] = None, + return_record: Optional[bool] = False, + record_template: str = "Text: {text}\nData: {data}", + ) -> Union[Text, Record]: + input_value_record: Optional[Record] = None + if return_record: + if isinstance(input_value, Record): + # Update the data of the record + input_value.data["sender"] = sender + input_value.data["sender_name"] = sender_name + input_value.data["session_id"] = session_id + else: + input_value_record = Record( + text=input_value, + data={ + "sender": sender, + "sender_name": sender_name, + "session_id": session_id, + }, + ) + elif isinstance(input_value, Record): + input_value = records_to_text(template=record_template, records=input_value) + if not input_value: + input_value = "" + if return_record and input_value_record: + result: Union[Text, Record] = input_value_record + else: + result = input_value + self.status = result + if session_id and isinstance(result, (Record, str)): + self.store_message(result, session_id, sender, sender_name) + return result diff --git a/src/backend/base/langflow/base/io/text.py b/src/backend/base/langflow/base/io/text.py new file mode 100644 index 000000000..5974e7d13 --- /dev/null +++ b/src/backend/base/langflow/base/io/text.py @@ -0,0 +1,42 @@ +from typing import Optional + +from langflow.field_typing import Text +from langflow.helpers.record import records_to_text +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record + + +class TextComponent(CustomComponent): + display_name = "Text Component" + description = "Used to pass text to the next component." + + def build_config(self): + return { + "input_value": { + "display_name": "Value", + "input_types": ["Text", "Record"], + "info": "Text or Record to be passed.", + }, + "record_template": { + "display_name": "Record Template", + "multiline": True, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "advanced": True, + }, + } + + def build( + self, + input_value: Optional[Text] = "", + record_template: Optional[str] = "{text}", + ) -> Text: + if isinstance(input_value, Record): + if record_template == "": + # it should be dynamically set to the Record's .text_key value + # meaning, if text_key = "bacon", then record_template = "{bacon}" + record_template = "{" + input_value.text_key + "}" + input_value = records_to_text(template=record_template, records=input_value) + self.status = input_value + if not input_value: + input_value = "" + return input_value diff --git a/src/backend/base/langflow/base/models/__init__.py b/src/backend/base/langflow/base/models/__init__.py new file mode 100644 index 000000000..921f10336 --- /dev/null +++ b/src/backend/base/langflow/base/models/__init__.py @@ -0,0 +1,3 @@ +from .model import LCModelComponent + +__all__ = ["LCModelComponent"] diff --git a/src/backend/base/langflow/base/models/model.py b/src/backend/base/langflow/base/models/model.py new file mode 100644 index 000000000..b8ef38e07 --- /dev/null +++ b/src/backend/base/langflow/base/models/model.py @@ -0,0 +1,50 @@ +from typing import Optional, Union + +from langchain_core.language_models.chat_models import BaseChatModel +from langchain_core.language_models.llms import LLM +from langchain_core.messages import HumanMessage, SystemMessage + +from langflow.custom import CustomComponent + + +class LCModelComponent(CustomComponent): + display_name: str = "Model Name" + description: str = "Model Description" + + def get_result(self, runnable: LLM, stream: bool, input_value: str): + """ + Retrieves the result from the output of a Runnable object. + + Args: + output (Runnable): The output object to retrieve the result from. + stream (bool): Indicates whether to use streaming or invocation mode. + input_value (str): The input value to pass to the output object. 
+ + Returns: + The result obtained from the output object. + """ + if stream: + result = runnable.stream(input_value) + else: + message = runnable.invoke(input_value) + result = message.content if hasattr(message, "content") else message + self.status = result + return result + + def get_chat_result( + self, runnable: BaseChatModel, stream: bool, input_value: str, system_message: Optional[str] = None + ): + messages: list[Union[HumanMessage, SystemMessage]] = [] + if not input_value and not system_message: + raise ValueError("The message you want to send to the model is empty.") + if system_message: + messages.append(SystemMessage(content=system_message)) + if input_value: + messages.append(HumanMessage(content=input_value)) + if stream: + return runnable.stream(messages) + else: + message = runnable.invoke(messages) + result = message.content + self.status = result + return result diff --git a/src/backend/langflow/interface/toolkits/__init__.py b/src/backend/base/langflow/base/prompts/__init__.py similarity index 100% rename from src/backend/langflow/interface/toolkits/__init__.py rename to src/backend/base/langflow/base/prompts/__init__.py diff --git a/src/backend/base/langflow/base/prompts/utils.py b/src/backend/base/langflow/base/prompts/utils.py new file mode 100644 index 000000000..ff4cf5079 --- /dev/null +++ b/src/backend/base/langflow/base/prompts/utils.py @@ -0,0 +1,137 @@ +from fastapi import HTTPException +from langchain.prompts import PromptTemplate +from langchain_core.documents import Document +from loguru import logger + +from langflow.api.v1.base import INVALID_NAMES, check_input_variables +from langflow.interface.utils import extract_input_variables_from_prompt +from langflow.schema import Record +from langflow.template.field.prompt import DefaultPromptField + + +def dict_values_to_string(d: dict) -> dict: + """ + Converts the values of a dictionary to strings. + + Args: + d (dict): The dictionary whose values need to be converted. 
+ + Returns: + dict: The dictionary with values converted to strings. + """ + # Do something similar to the above + for key, value in d.items(): + # it could be a list of records or documents or strings + if isinstance(value, list): + for i, item in enumerate(value): + if isinstance(item, Record): + d[key][i] = record_to_string(item) + elif isinstance(item, Document): + d[key][i] = document_to_string(item) + elif isinstance(value, Record): + d[key] = record_to_string(value) + elif isinstance(value, Document): + d[key] = document_to_string(value) + return d + + +def record_to_string(record: Record) -> str: + """ + Convert a record to a string. + + Args: + record (Record): The record to convert. + + Returns: + str: The record as a string. + """ + return record.get_text() + + +def document_to_string(document: Document) -> str: + """ + Convert a document to a string. + + Args: + document (Document): The document to convert. + + Returns: + str: The document as a string. + """ + return document.page_content + + +def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[str]: + input_variables = extract_input_variables_from_prompt(prompt_template) + + # Check if there are invalid characters in the input_variables + input_variables = check_input_variables(input_variables) + if any(var in INVALID_NAMES for var in input_variables): + raise ValueError(f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. 
") + + try: + PromptTemplate(template=prompt_template, input_variables=input_variables) + except Exception as exc: + logger.error(f"Invalid prompt: {exc}") + if not silent_errors: + raise ValueError(f"Invalid prompt: {exc}") from exc + + return input_variables + + +def get_old_custom_fields(custom_fields, name): + try: + if len(custom_fields) == 1 and name == "": + # If there is only one custom field and the name is empty string + # then we are dealing with the first prompt request after the node was created + name = list(custom_fields.keys())[0] + + old_custom_fields = custom_fields[name] + if not old_custom_fields: + old_custom_fields = [] + + old_custom_fields = old_custom_fields.copy() + except KeyError: + old_custom_fields = [] + custom_fields[name] = [] + return old_custom_fields + + +def add_new_variables_to_template(input_variables, custom_fields, template, name): + for variable in input_variables: + try: + template_field = DefaultPromptField(name=variable, display_name=variable) + if variable in template: + # Set the new field with the old value + template_field.value = template[variable]["value"] + + template[variable] = template_field.to_dict() + + # Check if variable is not already in the list before appending + if variable not in custom_fields[name]: + custom_fields[name].append(variable) + + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=500, detail=str(exc)) from exc + + +def remove_old_variables_from_template(old_custom_fields, input_variables, custom_fields, template, name): + for variable in old_custom_fields: + if variable not in input_variables: + try: + # Remove the variable from custom_fields associated with the given name + if variable in custom_fields[name]: + custom_fields[name].remove(variable) + + # Remove the variable from the template + template.pop(variable, None) + + except Exception as exc: + logger.exception(exc) + raise HTTPException(status_code=500, detail=str(exc)) from exc + + +def 
update_input_variables_field(input_variables, template): + if "input_variables" in template: + template["input_variables"]["value"] = input_variables diff --git a/src/backend/base/langflow/components/__init__.py b/src/backend/base/langflow/components/__init__.py new file mode 100644 index 000000000..fdd1144c9 --- /dev/null +++ b/src/backend/base/langflow/components/__init__.py @@ -0,0 +1,17 @@ +__all__ = [ + "agents", + "chains", + "documentloaders", + "embeddings", + "experimental", + "inputs", + "memories", + "model_specs", + "outputs", + "retrievers", + "textsplitters", + "toolkits", + "tools", + "vectorsearch", + "vectorstores", +] diff --git a/src/backend/langflow/components/agents/AgentInitializer.py b/src/backend/base/langflow/components/agents/AgentInitializer.py similarity index 96% rename from src/backend/langflow/components/agents/AgentInitializer.py rename to src/backend/base/langflow/components/agents/AgentInitializer.py index 3d936df8a..d1f09d5cf 100644 --- a/src/backend/langflow/components/agents/AgentInitializer.py +++ b/src/backend/base/langflow/components/agents/AgentInitializer.py @@ -1,8 +1,9 @@ from typing import Callable, List, Optional, Union from langchain.agents import AgentExecutor, AgentType, initialize_agent, types -from langflow import CustomComponent + from langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool +from langflow.interface.custom.custom_component import CustomComponent class AgentInitializerComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/agents/CSVAgent.py b/src/backend/base/langflow/components/agents/CSVAgent.py new file mode 100644 index 000000000..57774568f --- /dev/null +++ b/src/backend/base/langflow/components/agents/CSVAgent.py @@ -0,0 +1,34 @@ +from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent + +from langflow.custom import CustomComponent +from langflow.field_typing import AgentExecutor, BaseLanguageModel + + +class 
CSVAgentComponent(CustomComponent): + display_name = "CSVAgent" + description = "Construct a CSV agent from a CSV and tools." + documentation = "https://python.langchain.com/docs/modules/agents/toolkits/csv" + + def build_config(self): + return { + "llm": {"display_name": "LLM", "type": BaseLanguageModel}, + "path": {"display_name": "Path", "field_type": "file", "suffixes": [".csv"], "file_types": [".csv"]}, + "handle_parsing_errors": {"display_name": "Handle Parse Errors", "advanced": True}, + "agent_type": { + "display_name": "Agent Type", + "options": ["zero-shot-react-description", "openai-functions", "openai-tools"], + "advanced": True, + }, + } + + def build( + self, llm: BaseLanguageModel, path: str, handle_parsing_errors: bool = True, agent_type: str = "openai-tools" + ) -> AgentExecutor: + # Instantiate and return the CSV agent class with the provided llm and path + return create_csv_agent( + llm=llm, + path=path, + agent_type=agent_type, + verbose=True, + agent_executor_kwargs=dict(handle_parsing_errors=handle_parsing_errors), + ) diff --git a/src/backend/langflow/components/agents/JsonAgent.py b/src/backend/base/langflow/components/agents/JsonAgent.py similarity index 90% rename from src/backend/langflow/components/agents/JsonAgent.py rename to src/backend/base/langflow/components/agents/JsonAgent.py index 0197b9210..5fa342417 100644 --- a/src/backend/langflow/components/agents/JsonAgent.py +++ b/src/backend/base/langflow/components/agents/JsonAgent.py @@ -1,9 +1,10 @@ -from langflow import CustomComponent from langchain.agents import AgentExecutor, create_json_agent +from langchain_community.agent_toolkits.json.toolkit import JsonToolkit + from langflow.field_typing import ( BaseLanguageModel, ) -from langchain_community.agent_toolkits.json.toolkit import JsonToolkit +from langflow.interface.custom.custom_component import CustomComponent class JsonAgentComponent(CustomComponent): diff --git 
a/src/backend/langflow/components/agents/OpenAIConversationalAgent.py b/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py similarity index 95% rename from src/backend/langflow/components/agents/OpenAIConversationalAgent.py rename to src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py index 5dfb53387..0edf8ea55 100644 --- a/src/backend/langflow/components/agents/OpenAIConversationalAgent.py +++ b/src/backend/base/langflow/components/agents/OpenAIConversationalAgent.py @@ -9,13 +9,15 @@ from langchain.prompts.chat import MessagesPlaceholder from langchain.schema.memory import BaseMemory from langchain.tools import Tool from langchain_community.chat_models import ChatOpenAI -from langflow import CustomComponent + from langflow.field_typing.range_spec import RangeSpec +from langflow.interface.custom.custom_component import CustomComponent class ConversationalAgent(CustomComponent): display_name: str = "OpenAI Conversational Agent" description: str = "Conversational Agent that can use OpenAI's function calling API" + icon = "OpenAI" def build_config(self): openai_function_models = [ @@ -40,7 +42,7 @@ class ConversationalAgent(CustomComponent): "temperature": { "display_name": "Temperature", "value": 0.2, - "range_spec": RangeSpec(min=0, max=2, step=0.1), + "rangeSpec": RangeSpec(min=0, max=2, step=0.1), }, } diff --git a/src/backend/langflow/components/agents/SQLAgent.py b/src/backend/base/langflow/components/agents/SQLAgent.py similarity index 90% rename from src/backend/langflow/components/agents/SQLAgent.py rename to src/backend/base/langflow/components/agents/SQLAgent.py index 42b1b48f3..68f65d18f 100644 --- a/src/backend/langflow/components/agents/SQLAgent.py +++ b/src/backend/base/langflow/components/agents/SQLAgent.py @@ -1,10 +1,12 @@ -from langflow import CustomComponent -from typing import Union, Callable +from typing import Callable, Union + from langchain.agents import AgentExecutor -from langflow.field_typing 
import BaseLanguageModel -from langchain_community.agent_toolkits.sql.base import create_sql_agent from langchain.sql_database import SQLDatabase from langchain_community.agent_toolkits import SQLDatabaseToolkit +from langchain_community.agent_toolkits.sql.base import create_sql_agent + +from langflow.field_typing import BaseLanguageModel +from langflow.interface.custom.custom_component import CustomComponent class SQLAgentComponent(CustomComponent): diff --git a/src/backend/langflow/components/agents/VectorStoreAgent.py b/src/backend/base/langflow/components/agents/VectorStoreAgent.py similarity index 87% rename from src/backend/langflow/components/agents/VectorStoreAgent.py rename to src/backend/base/langflow/components/agents/VectorStoreAgent.py index b70ea4d59..095f9da41 100644 --- a/src/backend/langflow/components/agents/VectorStoreAgent.py +++ b/src/backend/base/langflow/components/agents/VectorStoreAgent.py @@ -1,8 +1,10 @@ -from langflow import CustomComponent +from typing import Callable, Union + from langchain.agents import AgentExecutor, create_vectorstore_agent from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit -from typing import Union, Callable + from langflow.field_typing import BaseLanguageModel +from langflow.interface.custom.custom_component import CustomComponent class VectorStoreAgentComponent(CustomComponent): diff --git a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py similarity index 91% rename from src/backend/langflow/components/agents/VectorStoreRouterAgent.py rename to src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py index 3174d9513..514a9767c 100644 --- a/src/backend/langflow/components/agents/VectorStoreRouterAgent.py +++ b/src/backend/base/langflow/components/agents/VectorStoreRouterAgent.py @@ -1,9 +1,11 @@ -from langflow import CustomComponent -from langchain_core.language_models.base import 
BaseLanguageModel -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit -from langchain.agents import create_vectorstore_router_agent from typing import Callable +from langchain.agents import create_vectorstore_router_agent +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit +from langchain_core.language_models.base import BaseLanguageModel + +from langflow.interface.custom.custom_component import CustomComponent + class VectorStoreRouterAgentComponent(CustomComponent): display_name = "VectorStoreRouterAgent" diff --git a/src/backend/base/langflow/components/agents/XMLAgent.py b/src/backend/base/langflow/components/agents/XMLAgent.py new file mode 100644 index 000000000..687bfff6f --- /dev/null +++ b/src/backend/base/langflow/components/agents/XMLAgent.py @@ -0,0 +1,89 @@ +from typing import List, Optional + +from langchain.agents import create_xml_agent +from langchain_core.prompts import PromptTemplate + +from langflow.base.agents.agent import LCAgentComponent +from langflow.field_typing import BaseLLM, BaseMemory, Text, Tool + + +class XMLAgentComponent(LCAgentComponent): + display_name = "XMLAgent" + description = "Construct an XML agent from an LLM and tools." + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "tools": {"display_name": "Tools"}, + "prompt": { + "display_name": "Prompt", + "multiline": True, + "info": "This prompt must contain 'tools' and 'agent_scratchpad' keys.", + "value": """You are a helpful assistant. Help the user answer any questions. + + You have access to the following tools: + + {tools} + + In order to use a tool, you can use and tags. You will then get back a response in the form + For example, if you have a tool called 'search' that could run a google search, in order to search for the weather in SF you would respond: + + searchweather in SF + 64 degrees + + When you are done, respond with a final answer between . 
For example: + + The weather in SF is 64 degrees + + Begin! + + Previous Conversation: + {chat_history} + + Question: {input} + {agent_scratchpad}""", + }, + "tool_template": { + "display_name": "Tool Template", + "info": "Template for rendering tools in the prompt. Tools have 'name' and 'description' keys.", + "advanced": True, + }, + "handle_parsing_errors": { + "display_name": "Handle Parsing Errors", + "info": "If True, the agent will handle parsing errors. If False, the agent will raise an error.", + "advanced": True, + }, + "memory": { + "display_name": "Memory", + "info": "Memory to use for the agent.", + }, + "input_value": { + "display_name": "Inputs", + "info": "Input text to pass to the agent.", + }, + } + + async def build( + self, + input_value: str, + llm: BaseLLM, + tools: List[Tool], + prompt: str, + memory: Optional[BaseMemory] = None, + tool_template: str = "{name}: {description}", + handle_parsing_errors: bool = True, + ) -> Text: + if "input" not in prompt: + raise ValueError("Prompt must contain 'input' key.") + + def render_tool_description(tools): + return "\n".join( + [tool_template.format(name=tool.name, description=tool.description, args=tool.args) for tool in tools] + ) + + prompt_template = PromptTemplate.from_template(prompt) + input_variables = prompt_template.input_variables + agent = create_xml_agent(llm, tools, prompt_template, tools_renderer=render_tool_description) + result = await self.run_agent(agent, input_value, input_variables, tools, memory, handle_parsing_errors) + self.status = result + return result diff --git a/src/backend/base/langflow/components/agents/__init__.py b/src/backend/base/langflow/components/agents/__init__.py new file mode 100644 index 000000000..f2a118b58 --- /dev/null +++ b/src/backend/base/langflow/components/agents/__init__.py @@ -0,0 +1,19 @@ +from .AgentInitializer import AgentInitializerComponent +from .CSVAgent import CSVAgentComponent +from .JsonAgent import JsonAgentComponent +from 
.OpenAIConversationalAgent import ConversationalAgent +from .SQLAgent import SQLAgentComponent +from .VectorStoreAgent import VectorStoreAgentComponent +from .VectorStoreRouterAgent import VectorStoreRouterAgentComponent +from .XMLAgent import XMLAgentComponent + +__all__ = [ + "AgentInitializerComponent", + "CSVAgentComponent", + "JsonAgentComponent", + "ConversationalAgent", + "SQLAgentComponent", + "VectorStoreAgentComponent", + "VectorStoreRouterAgentComponent", + "XMLAgentComponent", +] diff --git a/src/backend/base/langflow/components/chains/ConversationChain.py b/src/backend/base/langflow/components/chains/ConversationChain.py new file mode 100644 index 000000000..2b8dd09a3 --- /dev/null +++ b/src/backend/base/langflow/components/chains/ConversationChain.py @@ -0,0 +1,46 @@ +from typing import Optional + +from langchain.chains import ConversationChain + +from langflow.field_typing import BaseLanguageModel, BaseMemory, Text +from langflow.interface.custom.custom_component import CustomComponent + + +class ConversationChainComponent(CustomComponent): + display_name = "ConversationChain" + description = "Chain to have a conversation and load context from memory." + + def build_config(self): + return { + "prompt": {"display_name": "Prompt"}, + "llm": {"display_name": "LLM"}, + "memory": { + "display_name": "Memory", + "info": "Memory to load context from. 
If none is provided, a ConversationBufferMemory will be used.", + }, + "input_value": { + "display_name": "Input Value", + "info": "The input value to pass to the chain.", + }, + } + + def build( + self, + input_value: Text, + llm: BaseLanguageModel, + memory: Optional[BaseMemory] = None, + ) -> Text: + if memory is None: + chain = ConversationChain(llm=llm) + else: + chain = ConversationChain(llm=llm, memory=memory) + result = chain.invoke({"input": input_value}) + if isinstance(result, dict): + result = result.get(chain.output_key, "") # type: ignore + + elif isinstance(result, str): + result = result + else: + result = result.get("response") + self.status = result + return str(result) diff --git a/src/backend/langflow/components/chains/LLMChain.py b/src/backend/base/langflow/components/chains/LLMChain.py similarity index 53% rename from src/backend/langflow/components/chains/LLMChain.py rename to src/backend/base/langflow/components/chains/LLMChain.py index c6efd04c7..9c2b3eae2 100644 --- a/src/backend/langflow/components/chains/LLMChain.py +++ b/src/backend/base/langflow/components/chains/LLMChain.py @@ -1,14 +1,9 @@ -from typing import Callable, Optional, Union +from typing import Optional from langchain.chains import LLMChain -from langflow import CustomComponent -from langflow.field_typing import ( - BaseLanguageModel, - BaseMemory, - BasePromptTemplate, - Chain, -) +from langflow.field_typing import BaseLanguageModel, BaseMemory, BasePromptTemplate, Text +from langflow.interface.custom.custom_component import CustomComponent class LLMChainComponent(CustomComponent): @@ -20,7 +15,6 @@ class LLMChainComponent(CustomComponent): "prompt": {"display_name": "Prompt"}, "llm": {"display_name": "LLM"}, "memory": {"display_name": "Memory"}, - "code": {"show": False}, } def build( @@ -28,5 +22,10 @@ class LLMChainComponent(CustomComponent): prompt: BasePromptTemplate, llm: BaseLanguageModel, memory: Optional[BaseMemory] = None, - ) -> Union[Chain, Callable, LLMChain]: 
- return LLMChain(prompt=prompt, llm=llm, memory=memory) + ) -> Text: + runnable = LLMChain(prompt=prompt, llm=llm, memory=memory) + result_dict = runnable.invoke({}) + output_key = runnable.output_key + result = result_dict[output_key] + self.status = result + return result diff --git a/src/backend/base/langflow/components/chains/LLMCheckerChain.py b/src/backend/base/langflow/components/chains/LLMCheckerChain.py new file mode 100644 index 000000000..04e6fe67d --- /dev/null +++ b/src/backend/base/langflow/components/chains/LLMCheckerChain.py @@ -0,0 +1,31 @@ +from langchain.chains import LLMCheckerChain + +from langflow.field_typing import BaseLanguageModel, Text +from langflow.interface.custom.custom_component import CustomComponent + + +class LLMCheckerChainComponent(CustomComponent): + display_name = "LLMCheckerChain" + description = "" + documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker" + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "input_value": { + "display_name": "Input Value", + "info": "The input value to pass to the chain.", + }, + } + + def build( + self, + input_value: Text, + llm: BaseLanguageModel, + ) -> Text: + chain = LLMCheckerChain.from_llm(llm=llm) + response = chain.invoke({chain.input_key: input_value}) + result = response.get(chain.output_key, "") + result_str = Text(result) + self.status = result_str + return result_str diff --git a/src/backend/langflow/components/chains/LLMMathChain.py b/src/backend/base/langflow/components/chains/LLMMathChain.py similarity index 56% rename from src/backend/langflow/components/chains/LLMMathChain.py rename to src/backend/base/langflow/components/chains/LLMMathChain.py index 28f430e6d..0b7374da6 100644 --- a/src/backend/langflow/components/chains/LLMMathChain.py +++ b/src/backend/base/langflow/components/chains/LLMMathChain.py @@ -1,9 +1,9 @@ -from typing import Callable, Optional, Union +from typing import Optional from langchain.chains 
import LLMChain, LLMMathChain -from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain +from langflow.field_typing import BaseLanguageModel, BaseMemory, Text +from langflow.interface.custom.custom_component import CustomComponent class LLMMathChainComponent(CustomComponent): @@ -18,14 +18,30 @@ class LLMMathChainComponent(CustomComponent): "memory": {"display_name": "Memory"}, "input_key": {"display_name": "Input Key"}, "output_key": {"display_name": "Output Key"}, + "input_value": { + "display_name": "Input Value", + "info": "The input value to pass to the chain.", + }, } def build( self, + input_value: Text, llm: BaseLanguageModel, llm_chain: LLMChain, input_key: str = "question", output_key: str = "answer", memory: Optional[BaseMemory] = None, - ) -> Union[LLMMathChain, Callable, Chain]: - return LLMMathChain(llm=llm, llm_chain=llm_chain, input_key=input_key, output_key=output_key, memory=memory) + ) -> Text: + chain = LLMMathChain( + llm=llm, + llm_chain=llm_chain, + input_key=input_key, + output_key=output_key, + memory=memory, + ) + response = chain.invoke({input_key: input_value}) + result = response.get(output_key) + result_str = Text(result) + self.status = result_str + return result_str diff --git a/src/backend/base/langflow/components/chains/RetrievalQA.py b/src/backend/base/langflow/components/chains/RetrievalQA.py new file mode 100644 index 000000000..7b6ba49e4 --- /dev/null +++ b/src/backend/base/langflow/components/chains/RetrievalQA.py @@ -0,0 +1,68 @@ +from typing import Optional + +from langchain.chains.retrieval_qa.base import RetrievalQA +from langchain_core.documents import Document + +from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record + + +class RetrievalQAComponent(CustomComponent): + display_name = "Retrieval QA" + description = "Chain for 
question-answering against an index." + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "chain_type": {"display_name": "Chain Type", "options": ["Stuff", "Map Reduce", "Refine", "Map Rerank"]}, + "retriever": {"display_name": "Retriever"}, + "memory": {"display_name": "Memory", "required": False}, + "input_key": {"display_name": "Input Key", "advanced": True}, + "output_key": {"display_name": "Output Key", "advanced": True}, + "return_source_documents": {"display_name": "Return Source Documents"}, + "input_value": { + "display_name": "Input", + "input_types": ["Record", "Document"], + }, + } + + def build( + self, + llm: BaseLanguageModel, + chain_type: str, + retriever: BaseRetriever, + input_value: str = "", + memory: Optional[BaseMemory] = None, + input_key: str = "query", + output_key: str = "result", + return_source_documents: bool = True, + ) -> Text: + chain_type = chain_type.lower().replace(" ", "_") + runnable = RetrievalQA.from_chain_type( + llm=llm, + chain_type=chain_type, + retriever=retriever, + memory=memory, + input_key=input_key, + output_key=output_key, + return_source_documents=return_source_documents, + ) + if isinstance(input_value, Document): + input_value = input_value.page_content + if isinstance(input_value, Record): + input_value = input_value.get_text() + self.status = runnable + result = runnable.invoke({input_key: input_value}) + result = result.content if hasattr(result, "content") else result + # Result is a dict with keys "query", "result" and "source_documents" + # for now we just return the result + records = self.to_records(result.get("source_documents")) + references_str = "" + if return_source_documents: + references_str = self.create_references_from_records(records) + result_str = result.get("result", "") + + final_result = "\n".join([Text(result_str), references_str]) + self.status = final_result + return final_result # OK diff --git 
a/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py new file mode 100644 index 000000000..75a9131f5 --- /dev/null +++ b/src/backend/base/langflow/components/chains/RetrievalQAWithSourcesChain.py @@ -0,0 +1,63 @@ +from typing import Optional + +from langchain.chains import RetrievalQAWithSourcesChain +from langchain_core.documents import Document + +from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text +from langflow.interface.custom.custom_component import CustomComponent + + +class RetrievalQAWithSourcesChainComponent(CustomComponent): + display_name = "RetrievalQAWithSourcesChain" + description = "Question-answering with sources over an index." + + def build_config(self): + return { + "llm": {"display_name": "LLM"}, + "chain_type": { + "display_name": "Chain Type", + "options": ["Stuff", "Map Reduce", "Refine", "Map Rerank"], + "info": "The type of chain to use to combined Documents.", + }, + "memory": {"display_name": "Memory"}, + "return_source_documents": {"display_name": "Return Source Documents"}, + "retriever": {"display_name": "Retriever"}, + "input_value": { + "display_name": "Input Value", + "info": "The input value to pass to the chain.", + }, + } + + def build( + self, + input_value: Text, + retriever: BaseRetriever, + llm: BaseLanguageModel, + chain_type: str, + memory: Optional[BaseMemory] = None, + return_source_documents: Optional[bool] = True, + ) -> Text: + chain_type = chain_type.lower().replace(" ", "_") + runnable = RetrievalQAWithSourcesChain.from_chain_type( + llm=llm, + chain_type=chain_type, + memory=memory, + return_source_documents=return_source_documents, + retriever=retriever, + ) + if isinstance(input_value, Document): + input_value = input_value.page_content + self.status = runnable + input_key = runnable.input_keys[0] + result = runnable.invoke({input_key: input_value}) + result = result.content if 
hasattr(result, "content") else result + # Result is a dict with keys "query", "result" and "source_documents" + # for now we just return the result + records = self.to_records(result.get("source_documents")) + references_str = "" + if return_source_documents: + references_str = self.create_references_from_records(records) + result_str = Text(result.get("answer", "")) + final_result = "\n".join([result_str, references_str]) + self.status = final_result + return final_result diff --git a/src/backend/base/langflow/components/chains/SQLGenerator.py b/src/backend/base/langflow/components/chains/SQLGenerator.py new file mode 100644 index 000000000..3b111ba71 --- /dev/null +++ b/src/backend/base/langflow/components/chains/SQLGenerator.py @@ -0,0 +1,61 @@ +from typing import Optional + +from langchain.chains import create_sql_query_chain +from langchain_community.utilities.sql_database import SQLDatabase +from langchain_core.prompts import PromptTemplate +from langchain_core.runnables import Runnable + +from langflow.field_typing import BaseLanguageModel, Text +from langflow.interface.custom.custom_component import CustomComponent + + +class SQLGeneratorComponent(CustomComponent): + display_name = "Natural Language to SQL" + description = "Generate SQL from natural language." + + def build_config(self): + return { + "db": {"display_name": "Database"}, + "llm": {"display_name": "LLM"}, + "prompt": { + "display_name": "Prompt", + "info": "The prompt must contain `{question}`.", + }, + "top_k": { + "display_name": "Top K", + "info": "The number of results per select statement to return. 
If 0, no limit.", + }, + "input_value": { + "display_name": "Input Value", + "info": "The input value to pass to the chain.", + }, + } + + def build( + self, + input_value: Text, + db: SQLDatabase, + llm: BaseLanguageModel, + top_k: int = 5, + prompt: Optional[Text] = None, + ) -> Text: + if prompt: + prompt_template = PromptTemplate.from_template(template=prompt) + else: + prompt_template = None + + if top_k < 1: + raise ValueError("Top K must be greater than 0.") + + if not prompt_template: + sql_query_chain = create_sql_query_chain(llm=llm, db=db, k=top_k) + else: + # Check if {question} is in the prompt + if "{question}" not in prompt_template.template or "question" not in prompt_template.input_variables: + raise ValueError("Prompt must contain `{question}` to be used with Natural Language to SQL.") + sql_query_chain = create_sql_query_chain(llm=llm, db=db, prompt=prompt_template, k=top_k) + query_writer: Runnable = sql_query_chain | {"query": lambda x: x.replace("SQLQuery:", "").strip()} + response = query_writer.invoke({"question": input_value}) + query = response.get("query") + self.status = query + return query diff --git a/src/backend/base/langflow/components/chains/__init__.py b/src/backend/base/langflow/components/chains/__init__.py new file mode 100644 index 000000000..365a80eb6 --- /dev/null +++ b/src/backend/base/langflow/components/chains/__init__.py @@ -0,0 +1,17 @@ +from .ConversationChain import ConversationChainComponent +from .LLMChain import LLMChainComponent +from .LLMCheckerChain import LLMCheckerChainComponent +from .LLMMathChain import LLMMathChainComponent +from .RetrievalQA import RetrievalQAComponent +from .RetrievalQAWithSourcesChain import RetrievalQAWithSourcesChainComponent +from .SQLGenerator import SQLGeneratorComponent + +__all__ = [ + "ConversationChainComponent", + "LLMChainComponent", + "LLMCheckerChainComponent", + "LLMMathChainComponent", + "RetrievalQAComponent", + "RetrievalQAWithSourcesChainComponent", + 
"SQLGeneratorComponent", +] diff --git a/src/backend/base/langflow/components/data/APIRequest.py b/src/backend/base/langflow/components/data/APIRequest.py new file mode 100644 index 000000000..988fc0d4c --- /dev/null +++ b/src/backend/base/langflow/components/data/APIRequest.py @@ -0,0 +1,121 @@ +import asyncio +import json +from typing import List, Optional + +import httpx + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class APIRequest(CustomComponent): + display_name: str = "API Request" + description: str = "Make HTTP requests given one or more URLs." + output_types: list[str] = ["Record"] + documentation: str = "https://docs.langflow.org/components/utilities#api-request" + icon = "Globe" + + field_config = { + "urls": {"display_name": "URLs", "info": "URLs to make requests to."}, + "method": { + "display_name": "Method", + "info": "The HTTP method to use.", + "field_type": "str", + "options": ["GET", "POST", "PATCH", "PUT"], + "value": "GET", + }, + "headers": { + "display_name": "Headers", + "info": "The headers to send with the request.", + "input_types": ["Record"], + }, + "body": { + "display_name": "Body", + "info": "The body to send with the request (for POST, PATCH, PUT).", + "input_types": ["Record"], + }, + "timeout": { + "display_name": "Timeout", + "field_type": "int", + "info": "The timeout to use for the request.", + "value": 5, + }, + } + + async def make_request( + self, + client: httpx.AsyncClient, + method: str, + url: str, + headers: Optional[Record] = None, + body: Optional[Record] = None, + timeout: int = 5, + ) -> Record: + method = method.upper() + if method not in ["GET", "POST", "PATCH", "PUT", "DELETE"]: + raise ValueError(f"Unsupported method: {method}") + + data = body if body else None + payload = json.dumps(data) + try: + response = await client.request(method, url, headers=headers, content=payload, timeout=timeout) + try: + result = response.json() + except 
Exception: + result = response.text + return Record( + data={ + "source": url, + "headers": headers, + "status_code": response.status_code, + "result": result, + }, + ) + except httpx.TimeoutException: + return Record( + data={ + "source": url, + "headers": headers, + "status_code": 408, + "error": "Request timed out", + }, + ) + except Exception as exc: + return Record( + data={ + "source": url, + "headers": headers, + "status_code": 500, + "error": str(exc), + }, + ) + + async def build( + self, + method: str, + urls: List[str], + _headers: Optional[Record] = None, + body: Optional[Record] = None, + timeout: int = 5, + ) -> List[Record]: + if _headers is None: + headers = {} + else: + headers = _headers.data + + bodies = [] + if body: + if isinstance(body, list): + bodies = [b.data for b in body] + else: + bodies = [body.data] + + if len(urls) != len(bodies): + # add bodies with None + bodies += [None] * (len(urls) - len(bodies)) # type: ignore + async with httpx.AsyncClient() as client: + results = await asyncio.gather( + *[self.make_request(client, method, u, headers, rec, timeout) for u, rec in zip(urls, bodies)] + ) + self.status = results + return results diff --git a/src/backend/base/langflow/components/data/Directory.py b/src/backend/base/langflow/components/data/Directory.py new file mode 100644 index 000000000..87dc99287 --- /dev/null +++ b/src/backend/base/langflow/components/data/Directory.py @@ -0,0 +1,63 @@ +from typing import Any, Dict, List, Optional + +from langflow.base.data.utils import parallel_load_records, parse_text_file_to_record, retrieve_file_paths +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class DirectoryComponent(CustomComponent): + display_name = "Directory" + description = "Recursively load files from a directory." 
+ icon = "folder" + + def build_config(self) -> Dict[str, Any]: + return { + "path": {"display_name": "Path"}, + "types": { + "display_name": "Types", + "info": "File types to load. Leave empty to load all types.", + }, + "depth": {"display_name": "Depth", "info": "Depth to search for files."}, + "max_concurrency": {"display_name": "Max Concurrency", "advanced": True}, + "load_hidden": { + "display_name": "Load Hidden", + "advanced": True, + "info": "If true, hidden files will be loaded.", + }, + "recursive": { + "display_name": "Recursive", + "advanced": True, + "info": "If true, the search will be recursive.", + }, + "silent_errors": { + "display_name": "Silent Errors", + "advanced": True, + "info": "If true, errors will not raise an exception.", + }, + "use_multithreading": { + "display_name": "Use Multithreading", + "advanced": True, + }, + } + + def build( + self, + path: str, + depth: int = 0, + max_concurrency: int = 2, + load_hidden: bool = False, + recursive: bool = True, + silent_errors: bool = False, + use_multithreading: bool = True, + ) -> List[Optional[Record]]: + resolved_path = self.resolve_path(path) + file_paths = retrieve_file_paths(resolved_path, load_hidden, recursive, depth) + loaded_records = [] + + if use_multithreading: + loaded_records = parallel_load_records(file_paths, silent_errors, max_concurrency) + else: + loaded_records = [parse_text_file_to_record(file_path, silent_errors) for file_path in file_paths] + loaded_records = list(filter(None, loaded_records)) + self.status = loaded_records + return loaded_records diff --git a/src/backend/base/langflow/components/data/File.py b/src/backend/base/langflow/components/data/File.py new file mode 100644 index 000000000..70fe1dccc --- /dev/null +++ b/src/backend/base/langflow/components/data/File.py @@ -0,0 +1,48 @@ +from pathlib import Path +from typing import Any, Dict + +from langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record +from 
langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class FileComponent(CustomComponent): + display_name = "File" + description = "A generic file loader." + icon = "file-text" + + def build_config(self) -> Dict[str, Any]: + return { + "path": { + "display_name": "Path", + "field_type": "file", + "file_types": TEXT_FILE_TYPES, + "info": f"Supported file types: {', '.join(TEXT_FILE_TYPES)}", + }, + "silent_errors": { + "display_name": "Silent Errors", + "advanced": True, + "info": "If true, errors will not raise an exception.", + }, + } + + def load_file(self, path: str, silent_errors: bool = False) -> Record: + resolved_path = self.resolve_path(path) + path_obj = Path(resolved_path) + extension = path_obj.suffix[1:].lower() + if extension == "doc": + raise ValueError("doc files are not supported. Please save as .docx") + if extension not in TEXT_FILE_TYPES: + raise ValueError(f"Unsupported file type: {extension}") + record = parse_text_file_to_record(resolved_path, silent_errors) + self.status = record if record else "No data" + return record or Record() + + def build( + self, + path: str, + silent_errors: bool = False, + ) -> Record: + record = self.load_file(path, silent_errors) + self.status = record + return record diff --git a/src/backend/base/langflow/components/data/URL.py b/src/backend/base/langflow/components/data/URL.py new file mode 100644 index 000000000..2b286e126 --- /dev/null +++ b/src/backend/base/langflow/components/data/URL.py @@ -0,0 +1,27 @@ +from typing import Any, Dict + +from langchain_community.document_loaders.web_base import WebBaseLoader + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class URLComponent(CustomComponent): + display_name = "URL" + description = "Fetch content from one or more URLs." 
+ icon = "layout-template" + + def build_config(self) -> Dict[str, Any]: + return { + "urls": {"display_name": "URL"}, + } + + def build( + self, + urls: list[str], + ) -> list[Record]: + loader = WebBaseLoader(web_paths=urls) + docs = loader.load() + records = self.to_records(docs) + self.status = records + return records diff --git a/src/backend/base/langflow/components/data/__init__.py b/src/backend/base/langflow/components/data/__init__.py new file mode 100644 index 000000000..ca82e3eb8 --- /dev/null +++ b/src/backend/base/langflow/components/data/__init__.py @@ -0,0 +1,7 @@ +from .APIRequest import APIRequest +from .Directory import DirectoryComponent +from .File import FileComponent + +from .URL import URLComponent + +__all__ = ["APIRequest", "DirectoryComponent", "FileComponent", "URLComponent"] diff --git a/src/backend/langflow/interface/utilities/__init__.py b/src/backend/base/langflow/components/documentloaders/__init__.py similarity index 100% rename from src/backend/langflow/interface/utilities/__init__.py rename to src/backend/base/langflow/components/documentloaders/__init__.py diff --git a/src/backend/langflow/components/embeddings/AmazonBedrockEmbeddings.py b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py similarity index 84% rename from src/backend/langflow/components/embeddings/AmazonBedrockEmbeddings.py rename to src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py index 450e5ee3b..d4330d9e4 100644 --- a/src/backend/langflow/components/embeddings/AmazonBedrockEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/AmazonBedrockEmbeddings.py @@ -1,19 +1,15 @@ from typing import Optional -from langchain.embeddings import BedrockEmbeddings from langchain.embeddings.base import Embeddings -from langflow import CustomComponent +from langchain_community.embeddings import BedrockEmbeddings + +from langflow.interface.custom.custom_component import CustomComponent class 
AmazonBedrockEmeddingsComponent(CustomComponent): - """ - A custom component for implementing an Embeddings Model using Amazon Bedrock. - """ - display_name: str = "Amazon Bedrock Embeddings" - description: str = "Embeddings model from Amazon Bedrock." + description: str = "Generate embeddings using Amazon Bedrock models." documentation = "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock" - beta = True def build_config(self): return { diff --git a/src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py similarity index 90% rename from src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py rename to src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py index 09b03e17b..d8aec24cd 100644 --- a/src/backend/langflow/components/embeddings/AzureOpenAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/AzureOpenAIEmbeddings.py @@ -1,14 +1,15 @@ from langchain.embeddings.base import Embeddings from langchain_community.embeddings import AzureOpenAIEmbeddings -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent class AzureOpenAIEmbeddingsComponent(CustomComponent): - display_name: str = "AzureOpenAIEmbeddings" - description: str = "Embeddings model from Azure OpenAI." + display_name: str = "Azure OpenAI Embeddings" + description: str = "Generate embeddings using Azure OpenAI models." 
documentation: str = "https://python.langchain.com/docs/integrations/text_embedding/azureopenai" beta = False + icon = "Azure" API_VERSION_OPTIONS = [ "2022-12-01", diff --git a/src/backend/langflow/components/embeddings/CohereEmbeddings.py b/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py similarity index 83% rename from src/backend/langflow/components/embeddings/CohereEmbeddings.py rename to src/backend/base/langflow/components/embeddings/CohereEmbeddings.py index d3f82b041..23d855f40 100644 --- a/src/backend/langflow/components/embeddings/CohereEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/CohereEmbeddings.py @@ -2,12 +2,12 @@ from typing import Optional from langchain_community.embeddings.cohere import CohereEmbeddings -from langflow import CustomComponent +from langflow.custom import CustomComponent class CohereEmbeddingsComponent(CustomComponent): - display_name = "CohereEmbeddings" - description = "Cohere embedding models." + display_name = "Cohere Embeddings" + description = "Generate embeddings using Cohere models." 
def build_config(self): return { @@ -16,6 +16,7 @@ class CohereEmbeddingsComponent(CustomComponent): "truncate": {"display_name": "Truncate", "advanced": True}, "max_retries": {"display_name": "Max Retries", "advanced": True}, "user_agent": {"display_name": "User Agent", "advanced": True}, + "request_timeout": {"display_name": "Request Timeout", "advanced": True}, } def build( diff --git a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py similarity index 84% rename from src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py rename to src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py index 6f3540358..849cd9bba 100644 --- a/src/backend/langflow/components/embeddings/HuggingFaceEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/HuggingFaceEmbeddings.py @@ -1,14 +1,17 @@ -from langflow import CustomComponent -from typing import Optional, Dict +from typing import Dict, Optional + from langchain_community.embeddings.huggingface import HuggingFaceEmbeddings +from langflow.interface.custom.custom_component import CustomComponent + class HuggingFaceEmbeddingsComponent(CustomComponent): - display_name = "HuggingFaceEmbeddings" - description = "HuggingFace sentence_transformers embedding models." + display_name = "Hugging Face Embeddings" + description = "Generate embeddings using HuggingFace models." 
documentation = ( "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers" ) + icon = "HuggingFace" def build_config(self): return { diff --git a/src/backend/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py similarity index 87% rename from src/backend/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py rename to src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py index acc828d7f..37a08ccc6 100644 --- a/src/backend/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/HuggingFaceInferenceAPIEmbeddings.py @@ -1,14 +1,16 @@ from typing import Dict, Optional from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings -from langflow import CustomComponent from pydantic.v1.types import SecretStr +from langflow.interface.custom.custom_component import CustomComponent + class HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent): - display_name = "HuggingFaceInferenceAPIEmbeddings" - description = "HuggingFace sentence_transformers embedding models, API version." + display_name = "Hugging Face API Embeddings" + description = "Generate embeddings using Hugging Face Inference API models." 
documentation = "https://github.com/huggingface/text-embeddings-inference" + icon = "HuggingFace" def build_config(self): return { diff --git a/src/backend/langflow/components/embeddings/OllamaEmbeddings.py b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py similarity index 84% rename from src/backend/langflow/components/embeddings/OllamaEmbeddings.py rename to src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py index 65c6aca3b..63ddc6fd4 100644 --- a/src/backend/langflow/components/embeddings/OllamaEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/OllamaEmbeddings.py @@ -1,19 +1,15 @@ from typing import Optional -from langflow import CustomComponent from langchain.embeddings.base import Embeddings from langchain_community.embeddings import OllamaEmbeddings +from langflow.interface.custom.custom_component import CustomComponent + class OllamaEmbeddingsComponent(CustomComponent): - """ - A custom component for implementing an Embeddings Model using Ollama. - """ - display_name: str = "Ollama Embeddings" - description: str = "Embeddings model from Ollama." + description: str = "Generate embeddings using Ollama models." 
documentation = "https://python.langchain.com/docs/integrations/text_embedding/ollama" - beta = True def build_config(self): return { diff --git a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py similarity index 78% rename from src/backend/langflow/components/embeddings/OpenAIEmbeddings.py rename to src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py index ba6d634b8..f177fa1f1 100644 --- a/src/backend/langflow/components/embeddings/OpenAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/OpenAIEmbeddings.py @@ -1,14 +1,14 @@ -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional from langchain_openai.embeddings.base import OpenAIEmbeddings -from langflow import CustomComponent -from langflow.field_typing import NestedDict -from pydantic.v1.types import SecretStr + +from langflow.field_typing import Embeddings, NestedDict +from langflow.interface.custom.custom_component import CustomComponent class OpenAIEmbeddingsComponent(CustomComponent): - display_name = "OpenAIEmbeddings" - description = "OpenAI embedding models" + display_name = "OpenAI Embeddings" + description = "Generate embeddings using OpenAI models." 
def build_config(self): return { @@ -45,12 +45,24 @@ class OpenAIEmbeddingsComponent(CustomComponent): "model": { "display_name": "Model", "advanced": False, - "options": ["text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002"], + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002", + ], }, "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "openai_api_base": {"display_name": "OpenAI API Base", "password": True, "advanced": True}, + "openai_api_base": { + "display_name": "OpenAI API Base", + "password": True, + "advanced": True, + }, "openai_api_key": {"display_name": "OpenAI API Key", "password": True}, - "openai_api_type": {"display_name": "OpenAI API Type", "advanced": True, "password": True}, + "openai_api_type": { + "display_name": "OpenAI API Type", + "advanced": True, + "password": True, + }, "openai_api_version": { "display_name": "OpenAI API Version", "advanced": True, @@ -66,25 +78,28 @@ class OpenAIEmbeddingsComponent(CustomComponent): "advanced": True, }, "skip_empty": {"display_name": "Skip Empty", "advanced": True}, - "tiktoken_model_name": {"display_name": "TikToken Model Name"}, - "tikToken_enable": {"display_name": "TikToken Enable", "advanced": True}, + "tiktoken_model_name": { + "display_name": "TikToken Model Name", + "advanced": True, + }, + "tiktoken_enable": {"display_name": "TikToken Enable", "advanced": True}, } def build( self, + openai_api_key: str, default_headers: Optional[Dict[str, str]] = None, default_query: Optional[NestedDict] = {}, allowed_special: List[str] = [], disallowed_special: List[str] = ["all"], chunk_size: int = 1000, client: Optional[Any] = None, - deployment: str = "text-embedding-3-small", + deployment: str = "text-embedding-ada-002", embedding_ctx_length: int = 8191, max_retries: int = 6, - model: str = "text-embedding-3-small", + model: str = "text-embedding-ada-002", model_kwargs: NestedDict = {}, openai_api_base: Optional[str] = 
None, - openai_api_key: Optional[str] = "", openai_api_type: Optional[str] = None, openai_api_version: Optional[str] = None, openai_organization: Optional[str] = None, @@ -94,13 +109,11 @@ class OpenAIEmbeddingsComponent(CustomComponent): skip_empty: bool = False, tiktoken_enable: bool = True, tiktoken_model_name: Optional[str] = None, - ) -> Union[OpenAIEmbeddings, Callable]: + ) -> Embeddings: # This is to avoid errors with Vector Stores (e.g Chroma) if disallowed_special == ["all"]: disallowed_special = "all" # type: ignore - api_key = SecretStr(openai_api_key) if openai_api_key else None - return OpenAIEmbeddings( tiktoken_enabled=tiktoken_enable, default_headers=default_headers, @@ -115,7 +128,7 @@ class OpenAIEmbeddingsComponent(CustomComponent): model=model, model_kwargs=model_kwargs, base_url=openai_api_base, - api_key=api_key, + api_key=openai_api_key, openai_api_type=openai_api_type, api_version=openai_api_version, organization=openai_organization, diff --git a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py similarity index 54% rename from src/backend/langflow/components/embeddings/VertexAIEmbeddings.py rename to src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py index 053fd6c7f..a88b403c3 100644 --- a/src/backend/langflow/components/embeddings/VertexAIEmbeddings.py +++ b/src/backend/base/langflow/components/embeddings/VertexAIEmbeddings.py @@ -1,25 +1,55 @@ -from langflow import CustomComponent -from langchain.embeddings import VertexAIEmbeddings -from typing import Optional, List +from typing import List, Optional + +from langchain_community.embeddings import VertexAIEmbeddings + +from langflow.interface.custom.custom_component import CustomComponent class VertexAIEmbeddingsComponent(CustomComponent): - display_name = "VertexAIEmbeddings" - description = "Google Cloud VertexAI embedding models." 
+ display_name = "VertexAI Embeddings" + description = "Generate embeddings using Google Cloud VertexAI models." def build_config(self): return { - "credentials": {"display_name": "Credentials", "value": "", "file_types": [".json"], "field_type": "file"}, - "instance": {"display_name": "instance", "advanced": True, "field_type": "dict"}, - "location": {"display_name": "Location", "value": "us-central1", "advanced": True}, + "credentials": { + "display_name": "Credentials", + "value": "", + "file_types": [".json"], + "field_type": "file", + }, + "instance": { + "display_name": "instance", + "advanced": True, + "field_type": "dict", + }, + "location": { + "display_name": "Location", + "value": "us-central1", + "advanced": True, + }, "max_output_tokens": {"display_name": "Max Output Tokens", "value": 128}, - "max_retries": {"display_name": "Max Retries", "value": 6, "advanced": True}, - "model_name": {"display_name": "Model Name", "value": "textembedding-gecko"}, + "max_retries": { + "display_name": "Max Retries", + "value": 6, + "advanced": True, + }, + "model_name": { + "display_name": "Model Name", + "value": "textembedding-gecko", + }, "n": {"display_name": "N", "value": 1, "advanced": True}, "project": {"display_name": "Project", "advanced": True}, - "request_parallelism": {"display_name": "Request Parallelism", "value": 5, "advanced": True}, + "request_parallelism": { + "display_name": "Request Parallelism", + "value": 5, + "advanced": True, + }, "stop": {"display_name": "Stop", "advanced": True}, - "streaming": {"display_name": "Streaming", "value": False, "advanced": True}, + "streaming": { + "display_name": "Streaming", + "value": False, + "advanced": True, + }, "temperature": {"display_name": "Temperature", "value": 0.0}, "top_k": {"display_name": "Top K", "value": 40, "advanced": True}, "top_p": {"display_name": "Top P", "value": 0.95, "advanced": True}, diff --git a/src/backend/base/langflow/components/embeddings/__init__.py 
b/src/backend/base/langflow/components/embeddings/__init__.py new file mode 100644 index 000000000..f2d1bc48e --- /dev/null +++ b/src/backend/base/langflow/components/embeddings/__init__.py @@ -0,0 +1,19 @@ +from .AmazonBedrockEmbeddings import AmazonBedrockEmeddingsComponent +from .AzureOpenAIEmbeddings import AzureOpenAIEmbeddingsComponent +from .CohereEmbeddings import CohereEmbeddingsComponent +from .HuggingFaceEmbeddings import HuggingFaceEmbeddingsComponent +from .HuggingFaceInferenceAPIEmbeddings import HuggingFaceInferenceAPIEmbeddingsComponent +from .OllamaEmbeddings import OllamaEmbeddingsComponent +from .OpenAIEmbeddings import OpenAIEmbeddingsComponent +from .VertexAIEmbeddings import VertexAIEmbeddingsComponent + +__all__ = [ + "AmazonBedrockEmeddingsComponent", + "AzureOpenAIEmbeddingsComponent", + "CohereEmbeddingsComponent", + "HuggingFaceEmbeddingsComponent", + "HuggingFaceInferenceAPIEmbeddingsComponent", + "OllamaEmbeddingsComponent", + "OpenAIEmbeddingsComponent", + "VertexAIEmbeddingsComponent", +] diff --git a/src/backend/base/langflow/components/experimental/ClearMessageHistory.py b/src/backend/base/langflow/components/experimental/ClearMessageHistory.py new file mode 100644 index 000000000..3c62b7ea3 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/ClearMessageHistory.py @@ -0,0 +1,26 @@ +from langflow.interface.custom.custom_component import CustomComponent +from langflow.memory import delete_messages, get_messages + + +class ClearMessageHistoryComponent(CustomComponent): + display_name = "Clear Message History" + description = "A component to clear the message history." 
+ icon = "ClearMessageHistory" + beta: bool = True + + def build_config(self): + return { + "session_id": { + "display_name": "Session ID", + "info": "The session ID to clear the message history.", + } + } + + def build( + self, + session_id: str, + ) -> None: + delete_messages(session_id=session_id) + records = get_messages(session_id=session_id) + self.records = records + return records diff --git a/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py b/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py new file mode 100644 index 000000000..5d816f112 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/ExtractDataFromRecord.py @@ -0,0 +1,45 @@ +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class ExtractKeyFromRecordComponent(CustomComponent): + display_name = "Extract Key From Record" + description = "Extracts a key from a record." + beta: bool = True + + field_config = { + "record": {"display_name": "Record"}, + "keys": { + "display_name": "Keys", + "info": "The keys to extract from the record.", + "input_types": [], + }, + "silent_error": { + "display_name": "Silent Errors", + "info": "If True, errors will not be raised.", + "advanced": True, + }, + } + + def build(self, record: Record, keys: list[str], silent_error: bool = True) -> Record: + """ + Extracts the keys from a record. + + Args: + record (Record): The record from which to extract the keys. + keys (list[str]): The keys to extract from the record. + silent_error (bool): If True, errors will not be raised. + + Returns: + dict: The extracted keys. 
+ """ + extracted_keys = {} + for key in keys: + try: + extracted_keys[key] = getattr(record, key) + except AttributeError: + if not silent_error: + raise KeyError(f"The key '{key}' does not exist in the record.") + return_record = Record(data=extracted_keys) + self.status = return_record + return return_record diff --git a/src/backend/base/langflow/components/experimental/FlowTool.py b/src/backend/base/langflow/components/experimental/FlowTool.py new file mode 100644 index 000000000..ac48a0f03 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/FlowTool.py @@ -0,0 +1,86 @@ +from typing import Any, List, Optional + +from langchain_core.tools import StructuredTool +from loguru import logger + +from langflow.custom import CustomComponent +from langflow.field_typing import Tool +from langflow.graph.graph.base import Graph +from langflow.helpers.flow import build_function_and_schema +from langflow.schema.dotdict import dotdict +from langflow.schema.schema import Record + + +class FlowToolComponent(CustomComponent): + display_name = "Flow as Tool" + description = "Construct a Tool from a function that runs the loaded Flow." + field_order = ["flow_name", "name", "description", "return_direct"] + + def get_flow_names(self) -> List[str]: + flow_records = self.list_flows() + return [flow_record.data["name"] for flow_record in flow_records] + + def get_flow(self, flow_name: str) -> Optional[Record]: + """ + Retrieves a flow by its name. + + Args: + flow_name (str): The name of the flow to retrieve. + + Returns: + Optional[Text]: The flow record if found, None otherwise. 
+ """ + flow_records = self.list_flows() + for flow_record in flow_records: + if flow_record.data["name"] == flow_name: + return flow_record + return None + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + logger.debug(f"Updating build config with field value {field_value} and field name {field_name}") + if field_name == "flow_name": + build_config["flow_name"]["options"] = self.get_flow_names() + + return build_config + + def build_config(self): + return { + "flow_name": { + "display_name": "Flow Name", + "info": "The name of the flow to run.", + "options": [], + "real_time_refresh": True, + "refresh_button": True, + }, + "name": { + "display_name": "Name", + "description": "The name of the tool.", + }, + "description": { + "display_name": "Description", + "description": "The description of the tool.", + }, + "return_direct": { + "display_name": "Return Direct", + "description": "Return the result directly from the Tool.", + "advanced": True, + }, + } + + async def build(self, flow_name: str, name: str, description: str, return_direct: bool = False) -> Tool: + flow_record = self.get_flow(flow_name) + if not flow_record: + raise ValueError("Flow not found.") + graph = Graph.from_payload(flow_record.data["data"]) + dynamic_flow_function, schema = build_function_and_schema(flow_record, graph) + tool = StructuredTool.from_function( + coroutine=dynamic_flow_function, + name=name, + description=description, + return_direct=return_direct, + args_schema=schema, + ) + description_repr = repr(tool.description).strip("'") + args_str = "\n".join([f"- {arg_name}: {arg_data['description']}" for arg_name, arg_data in tool.args.items()]) + self.status = f"{description_repr}\nArguments:\n{args_str}" + return tool # type: ignore diff --git a/src/backend/base/langflow/components/experimental/ListFlows.py b/src/backend/base/langflow/components/experimental/ListFlows.py new file mode 100644 index 000000000..c7b421d15 --- 
/dev/null +++ b/src/backend/base/langflow/components/experimental/ListFlows.py @@ -0,0 +1,21 @@ +from typing import List + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class ListFlowsComponent(CustomComponent): + display_name = "List Flows" + description = "A component to list all available flows." + icon = "ListFlows" + beta: bool = True + + def build_config(self): + return {} + + def build( + self, + ) -> List[Record]: + flows = self.list_flows() + self.status = flows + return flows diff --git a/src/backend/base/langflow/components/experimental/Listen.py b/src/backend/base/langflow/components/experimental/Listen.py new file mode 100644 index 000000000..cab979f70 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/Listen.py @@ -0,0 +1,21 @@ +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class ListenComponent(CustomComponent): + display_name = "Listen" + description = "A component to listen for a notification." + beta: bool = True + + def build_config(self): + return { + "name": { + "display_name": "Name", + "info": "The name of the notification to listen for.", + }, + } + + def build(self, name: str) -> Record: + state = self.get_state(name) + self.status = state + return state diff --git a/src/backend/base/langflow/components/experimental/MergeRecords.py b/src/backend/base/langflow/components/experimental/MergeRecords.py new file mode 100644 index 000000000..60e5ffe20 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/MergeRecords.py @@ -0,0 +1,36 @@ +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class MergeRecordsComponent(CustomComponent): + display_name = "Merge Records" + description = "Merges records." 
+ beta: bool = True + + field_config = { + "records": {"display_name": "Records"}, + } + + def build(self, records: list[Record]) -> Record: + if not records: + return Record() + if len(records) == 1: + return records[0] + merged_record = Record() + for record in records: + if merged_record is None: + merged_record = record + else: + merged_record += record + self.status = merged_record + return merged_record + + +if __name__ == "__main__": + records = [ + Record(data={"key1": "value1"}), + Record(data={"key2": "value2"}), + ] + component = MergeRecordsComponent() + result = component.build(records) + print(result) diff --git a/src/backend/base/langflow/components/experimental/Notify.py b/src/backend/base/langflow/components/experimental/Notify.py new file mode 100644 index 000000000..9af7f8ec6 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/Notify.py @@ -0,0 +1,41 @@ +from typing import Optional + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class NotifyComponent(CustomComponent): + display_name = "Notify" + description = "A component to generate a notification to Get Notified component." 
+ icon = "Notify" + beta: bool = True + + def build_config(self): + return { + "name": {"display_name": "Name", "info": "The name of the notification."}, + "record": {"display_name": "Record", "info": "The record to store."}, + "append": { + "display_name": "Append", + "info": "If True, the record will be appended to the notification.", + }, + } + + def build(self, name: str, record: Optional[Record] = None, append: bool = False) -> Record: + if record and not isinstance(record, Record): + if isinstance(record, str): + record = Record(text=record) + elif isinstance(record, dict): + record = Record(data=record) + else: + record = Record(text=str(record)) + elif not record: + record = Record(text="") + if record: + if append: + self.append_state(name, record) + else: + self.update_state(name, record) + else: + self.status = "No record provided." + self.status = record + return record diff --git a/src/backend/base/langflow/components/experimental/PythonFunction.py b/src/backend/base/langflow/components/experimental/PythonFunction.py new file mode 100644 index 000000000..28e902abe --- /dev/null +++ b/src/backend/base/langflow/components/experimental/PythonFunction.py @@ -0,0 +1,25 @@ +from typing import Callable + +from langflow.field_typing import Code +from langflow.interface.custom.custom_component import CustomComponent +from langflow.interface.custom.utils import get_function + + +class PythonFunctionComponent(CustomComponent): + display_name = "Python Function" + description = "Define a Python function." 
+ icon = "Python" + + def build_config(self): + return { + "function_code": { + "display_name": "Code", + "info": "The code for the function.", + "show": True, + }, + } + + def build(self, function_code: Code) -> Callable: + self.status = function_code + func = get_function(function_code) + return func diff --git a/src/backend/base/langflow/components/experimental/RunFlow.py b/src/backend/base/langflow/components/experimental/RunFlow.py new file mode 100644 index 000000000..d3769de7a --- /dev/null +++ b/src/backend/base/langflow/components/experimental/RunFlow.py @@ -0,0 +1,66 @@ +from typing import Any, List, Optional + +from langflow.custom import CustomComponent +from langflow.field_typing import NestedDict, Text +from langflow.graph.schema import ResultData +from langflow.schema import Record, dotdict + + +class RunFlowComponent(CustomComponent): + display_name = "Run Flow" + description = "A component to run a flow." + beta: bool = True + + def get_flow_names(self) -> List[str]: + flow_records = self.list_flows() + return [flow_record.data["name"] for flow_record in flow_records] + + def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None): + if field_name == "flow_name": + build_config["flow_name"]["options"] = self.get_flow_names() + + return build_config + + def build_config(self): + return { + "input_value": { + "display_name": "Input Value", + "multiline": True, + }, + "flow_name": { + "display_name": "Flow Name", + "info": "The name of the flow to run.", + "options": [], + "refresh_button": True, + }, + "tweaks": { + "display_name": "Tweaks", + "info": "Tweaks to apply to the flow.", + }, + } + + def build_records_from_result_data(self, result_data: ResultData) -> List[Record]: + messages = result_data.messages + if not messages: + return [] + records = [] + for message in messages: + message_dict = message if isinstance(message, dict) else message.model_dump() + record = Record(text=message_dict.get("text", 
""), data={"result": result_data}) + records.append(record) + return records + + async def build(self, input_value: Text, flow_name: str, tweaks: NestedDict) -> List[Record]: + results: List[Optional[ResultData]] = await self.run_flow( + inputs={"input_value": input_value}, flow_name=flow_name, tweaks=tweaks + ) + if isinstance(results, list): + records = [] + for result in results: + if result: + records.extend(self.build_records_from_result_data(result)) + else: + records = self.build_records_from_result_data(results) + + self.status = records + return records diff --git a/src/backend/base/langflow/components/experimental/RunnableExecutor.py b/src/backend/base/langflow/components/experimental/RunnableExecutor.py new file mode 100644 index 000000000..0a3593a66 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/RunnableExecutor.py @@ -0,0 +1,122 @@ +from langchain_core.runnables import Runnable + +from langflow.field_typing import Text +from langflow.interface.custom.custom_component import CustomComponent + + +class RunnableExecComponent(CustomComponent): + description = "Execute a runnable. It will try to guess the input and output keys." 
class RunnableExecComponent(CustomComponent):
    """Invoke a LangChain Runnable, guessing sensible input/output keys when needed."""

    description = "Execute a runnable. It will try to guess the input and output keys."
    display_name = "Runnable Executor"
    beta: bool = True
    field_order = [
        "input_key",
        "output_key",
        "input_value",
        "runnable",
    ]

    def build_config(self):
        return {
            "input_key": {
                "display_name": "Input Key",
                "info": "The key to use for the input.",
                "advanced": True,
            },
            "input_value": {
                "display_name": "Inputs",
                "info": "The inputs to pass to the runnable.",
            },
            "runnable": {
                "display_name": "Runnable",
                "info": "The runnable to execute.",
                "input_types": ["Chain", "AgentExecutor", "Agent", "Runnable"],
            },
            "output_key": {
                "display_name": "Output Key",
                "info": "The key to use for the output.",
                "advanced": True,
            },
        }

    def get_output(self, result, input_key, output_key):
        """Pick the output value out of the *result* dict.

        Tries, in order: the configured output_key, the "other" key of a
        two-entry dict, the sole value of a one-entry dict, then a list of
        well-known output key names; otherwise falls back to the whole dict.

        Args:
            result (dict): The result dictionary containing the output value.
            input_key (str): The key used to retrieve the input value from the result dictionary.
            output_key (str): The key used to retrieve the output value from the result dictionary.

        Returns:
            tuple: The output value and a status message describing any guesswork.
        """
        possible_output_keys = ["answer", "response", "output", "result", "text"]
        status = ""
        result_value = None

        if output_key in result:
            result_value = result.get(output_key)
        elif len(result) == 2 and input_key in result:
            # Exactly one key besides the input key: assume it holds the output.
            other_key = next(k for k in result if k != input_key)
            if other_key == output_key:
                result_value = result.get(output_key)
            else:
                status += f"Warning: The output key is not '{output_key}'. The output key is '{other_key}'."
                result_value = result.get(other_key)
        elif len(result) == 1:
            result_value = next(iter(result.values()))
        elif any(k in result for k in possible_output_keys):
            for key in possible_output_keys:
                if key in result:
                    result_value = result.get(key)
                    status += f"Output key: '{key}'."
                    break
            if result_value is None:
                result_value = result
                status += f"Warning: The output key is not '{output_key}'."
        else:
            result_value = result
            status += f"Warning: The output key is not '{output_key}'."

        return result_value, status

    def get_input_dict(self, runnable, input_key, input_value):
        """Build the input mapping for *runnable*.

        If the runnable declares input_keys and *input_key* is not among them,
        every declared key is filled with *input_value* and a warning is
        returned in the status string.

        Args:
            runnable: The runnable object.
            input_key: The key for the input value.
            input_value: The value for the input key.

        Returns:
            tuple: The input mapping and a status message.
        """
        input_dict = {}
        status = ""
        if hasattr(runnable, "input_keys"):
            if input_key in runnable.input_keys:
                input_dict[input_key] = input_value
            else:
                # Unknown key: feed the value to every declared input key instead.
                input_dict = {k: input_value for k in runnable.input_keys}
                status = f"Warning: The input key is not '{input_key}'. The input key is '{runnable.input_keys}'."
        return input_dict, status

    def build(
        self,
        input_value: Text,
        runnable: Runnable,
        input_key: str = "input",
        output_key: str = "output",
    ) -> Text:
        """Execute the runnable and return the extracted output value."""
        input_dict, status = self.get_input_dict(runnable, input_key, input_value)
        result = runnable.invoke(input_dict)
        result_value, _status = self.get_output(result, input_key, output_key)
        status += _status
        status += f"\n\nOutput: {result_value}\n\nRaw Output: {result}"
        self.status = status
        return result_value
+ beta: bool = True + + def build_config(self): + return { + "database_url": { + "display_name": "Database URL", + "info": "The URL of the database.", + }, + "include_columns": { + "display_name": "Include Columns", + "info": "Include columns in the result.", + }, + "passthrough": { + "display_name": "Passthrough", + "info": "If an error occurs, return the query instead of raising an exception.", + }, + "add_error": { + "display_name": "Add Error", + "info": "Add the error to the result.", + }, + } + + def clean_up_uri(self, uri: str) -> str: + if uri.startswith("postgresql://"): + uri = uri.replace("postgresql://", "postgres://") + return uri.strip() + + def build( + self, + query: str, + database_url: str, + include_columns: bool = False, + passthrough: bool = False, + add_error: bool = False, + ) -> Text: + error = None + try: + database = SQLDatabase.from_uri(database_url) + except Exception as e: + raise ValueError(f"An error occurred while connecting to the database: {e}") + try: + tool = QuerySQLDataBaseTool(db=database) + result = tool.run(query, include_columns=include_columns) + self.status = result + except Exception as e: + result = Text(e) + self.status = result + if not passthrough: + raise e + error = repr(e) + + if add_error and error is not None: + result = f"{result}\n\nError: {error}\n\nQuery: {query}" + elif error is not None: + # Then we won't add the error to the result + # but since we are in passthrough mode, we will return the query + result = query + + return result diff --git a/src/backend/base/langflow/components/experimental/SubFlow.py b/src/backend/base/langflow/components/experimental/SubFlow.py new file mode 100644 index 000000000..72e07c8e8 --- /dev/null +++ b/src/backend/base/langflow/components/experimental/SubFlow.py @@ -0,0 +1,119 @@ +from typing import Any, List, Optional + +from loguru import logger + +from langflow.custom import CustomComponent +from langflow.graph.graph.base import Graph +from langflow.graph.schema import 
class SubFlowComponent(CustomComponent):
    """Turn a stored flow into a component whose inputs mirror the flow's inputs."""

    display_name = "Sub Flow"
    description = "Dynamically Generates a Component from a Flow. The output is a list of records with keys 'result' and 'message'."
    beta: bool = True
    field_order = ["flow_name"]

    def get_flow_names(self) -> List[str]:
        # One entry per flow record visible to this user.
        return [flow_record.data["name"] for flow_record in self.list_flows()]

    def get_flow(self, flow_name: str) -> Optional[Record]:
        """Return the flow record whose name matches *flow_name*, or None."""
        for flow_record in self.list_flows():
            if flow_record.data["name"] == flow_name:
                return flow_record
        return None

    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
        """Rebuild the dynamic config when a flow is (re)selected."""
        logger.debug(f"Updating build config with field value {field_value} and field name {field_name}")
        if field_name == "flow_name":
            build_config["flow_name"]["options"] = self.get_flow_names()
        # Clean up the build config: keep only the static fields.
        for key in list(build_config.keys()):
            if key not in self.field_order + ["code", "_type"]:
                del build_config[key]
        if field_value is not None and field_name == "flow_name":
            try:
                flow_record = self.get_flow(field_value)
                if not flow_record:
                    raise ValueError(f"Flow {field_value} not found.")
                graph = Graph.from_payload(flow_record.data["data"])
                # Mirror the selected flow's inputs as fields of this component.
                inputs = get_flow_inputs(graph)
                build_config = self.add_inputs_to_build_config(inputs, build_config)
            except Exception as e:
                logger.error(f"Error getting flow {field_value}: {str(e)}")

        return build_config

    def add_inputs_to_build_config(self, inputs: List[Vertex], build_config: dotdict):
        """Append one text field per input vertex to *build_config*."""
        new_fields: list[TemplateField] = []
        for vertex in inputs:
            new_fields.append(
                TemplateField(
                    display_name=vertex.display_name,
                    name=vertex.id,
                    info=vertex.description,
                    field_type="str",
                    default=None,
                )
            )
        logger.debug(new_fields)
        for field in new_fields:
            build_config[field.name] = field.to_dict()
        return build_config

    def build_config(self):
        return {
            "input_value": {
                "display_name": "Input Value",
                "multiline": True,
            },
            "flow_name": {
                "display_name": "Flow Name",
                "info": "The name of the flow to run.",
                "options": [],
                "real_time_refresh": True,
                "refresh_button": True,
            },
            "tweaks": {
                "display_name": "Tweaks",
                "info": "Tweaks to apply to the flow.",
            },
        }

    def build_records_from_result_data(self, result_data: ResultData) -> List[Record]:
        """Convert every message attached to *result_data* into a Record."""
        messages = result_data.messages
        if not messages:
            return []
        return [
            Record(
                data={
                    "result": result_data.model_dump(),
                    "message": (message if isinstance(message, dict) else message.model_dump()).get("message", ""),
                }
            )
            for message in messages
        ]

    async def build(self, flow_name: str, **kwargs) -> List[Record]:
        """Run the selected flow, feeding each kwarg as that input's value."""
        tweaks = {key: {"input_value": value} for key, value in kwargs.items()}
        run_outputs: List[Optional[RunOutputs]] = await self.run_flow(
            tweaks=tweaks,
            flow_name=flow_name,
        )
        if not run_outputs:
            return []
        run_output = run_outputs[0]

        records: List[Record] = []
        if run_output is not None:
            for output in run_output.outputs:
                if output:
                    records.extend(self.build_records_from_result_data(output))

        self.status = records
        logger.debug(records)
        return records
.ExtractDataFromRecord import ExtractKeyFromRecordComponent +from .FlowTool import FlowToolComponent +from .ListFlows import ListFlowsComponent +from .Listen import ListenComponent +from .MergeRecords import MergeRecordsComponent +from .Notify import NotifyComponent +from .PythonFunction import PythonFunctionComponent +from .RunFlow import RunFlowComponent +from .RunnableExecutor import RunnableExecComponent +from .SQLExecutor import SQLExecutorComponent +from .SubFlow import SubFlowComponent + +__all__ = [ + "ClearMessageHistoryComponent", + "ExtractKeyFromRecordComponent", + "FlowToolComponent", + "ListFlowsComponent", + "ListenComponent", + "MergeRecordsComponent", + "NotifyComponent", + "PythonFunctionComponent", + "RunFlowComponent", + "RunnableExecComponent", + "SQLExecutorComponent", + "SubFlowComponent", + "PythonFunctionComponent", +] diff --git a/src/backend/base/langflow/components/helpers/CombineText.py b/src/backend/base/langflow/components/helpers/CombineText.py new file mode 100644 index 000000000..fcd23c188 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/CombineText.py @@ -0,0 +1,29 @@ +from langflow.interface.custom.custom_component import CustomComponent +from langflow.field_typing import Text + + +class CombineTextComponent(CustomComponent): + display_name = "Combine Text" + description = "Concatenate two text sources into a single text chunk using a specified delimiter." + icon = "merge" + + def build_config(self): + return { + "text1": { + "display_name": "First Text", + "info": "The first text input to concatenate.", + }, + "text2": { + "display_name": "Second Text", + "info": "The second text input to concatenate.", + }, + "delimiter": { + "display_name": "Delimiter", + "info": "A string used to separate the two text inputs. 
class CreateRecordComponent(CustomComponent):
    """Dynamically create a Record with a user-chosen number of key/value fields."""

    display_name = "Create Record"
    description = "Dynamically create a Record with a specified number of fields."
    field_order = ["number_of_fields", "text_key"]

    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):
        """Regenerate the dynamic field_N_key entries when number_of_fields changes.

        Raises:
            ValueError: if more than 15 fields are requested.
        """
        if field_name == "number_of_fields":
            default_keys = ["code", "_type", "number_of_fields", "text_key"]
            try:
                field_value_int = int(field_value)
            except (TypeError, ValueError):
                # Fix: int() raises ValueError for non-numeric strings (e.g. an
                # empty UI field), not only TypeError. Either way, leave the
                # config unchanged.
                return build_config
            existing_fields = {}
            if field_value_int > 15:
                build_config["number_of_fields"]["value"] = 15
                raise ValueError("Number of fields cannot exceed 15. Try using a Component to combine two Records.")
            if len(build_config) > len(default_keys) + field_value_int:
                # back up the existing template fields so user edits survive shrinking
                for key in build_config.copy():
                    if key not in default_keys:
                        existing_fields[key] = build_config.pop(key)

            for i in range(1, field_value_int + 1):
                key = f"field_{i}_key"
                if key in existing_fields:
                    # Restore the backed-up field rather than recreating it.
                    field = existing_fields[key]
                    build_config[key] = field
                else:
                    field = TemplateField(
                        display_name=f"Field {i}",
                        name=key,
                        info=f"Key for field {i}.",
                        field_type="dict",
                        input_types=["Text", "Record"],
                    )
                    build_config[field.name] = field.to_dict()

            build_config["number_of_fields"]["value"] = field_value_int
        return build_config

    def build_config(self):
        return {
            "number_of_fields": {
                "display_name": "Number of Fields",
                "info": "Number of fields to be added to the record.",
                "real_time_refresh": True,
                "rangeSpec": RangeSpec(min=1, max=15, step=1, step_type="int"),
            },
            "text_key": {
                "display_name": "Text Key",
                "info": "Key to be used as text.",
                "advanced": True,
            },
        }

    def build(
        self,
        number_of_fields: int = 0,
        text_key: str = "text",
        **kwargs,
    ) -> Record:
        """Merge every dict passed via **kwargs into a single Record."""
        data = {}
        for value_dict in kwargs.values():
            if isinstance(value_dict, dict):
                # Unwrap Record values to their text so stored data is plain.
                value_dict = {
                    key: value.get_text() if isinstance(value, Record) else value for key, value in value_dict.items()
                }
                data.update(value_dict)
        return_record = Record(data=data, text_key=text_key)
        self.status = return_record
        return return_record
import CustomComponent + + +class Component(CustomComponent): + display_name = "Custom Component" + description = "Use as a template to create your own component." + documentation: str = "http://docs.langflow.org/components/custom" + icon = "custom_components" + + def build_config(self): + return {"param": {"display_name": "Parameter"}} + + def build(self, param: str) -> Record: + return Record(data=param) diff --git a/src/backend/base/langflow/components/helpers/DocumentToRecord.py b/src/backend/base/langflow/components/helpers/DocumentToRecord.py new file mode 100644 index 000000000..362c0a9c1 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/DocumentToRecord.py @@ -0,0 +1,22 @@ +from typing import List + +from langchain_core.documents import Document + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class DocumentToRecordComponent(CustomComponent): + display_name = "Documents To Records" + description = "Convert LangChain Documents into Records." + + field_config = { + "documents": {"display_name": "Documents"}, + } + + def build(self, documents: List[Document]) -> List[Record]: + if isinstance(documents, Document): + documents = [documents] + records = [Record.from_document(document) for document in documents] + self.status = records + return records diff --git a/src/backend/base/langflow/components/helpers/IDGenerator.py b/src/backend/base/langflow/components/helpers/IDGenerator.py new file mode 100644 index 000000000..5778c4c22 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/IDGenerator.py @@ -0,0 +1,31 @@ +import uuid +from typing import Any, Optional + +from langflow.interface.custom.custom_component import CustomComponent + + +class UUIDGeneratorComponent(CustomComponent): + documentation: str = "http://docs.langflow.org/components/custom" + display_name = "ID Generator" + description = "Generates a unique ID." 
+ + def update_build_config( + self, + build_config: dict, + field_value: Any, + field_name: Optional[str] = None, + ): + if field_name == "unique_id": + build_config[field_name]["value"] = str(uuid.uuid4()) + return build_config + + def build_config(self): + return { + "unique_id": { + "display_name": "Value", + "real_time_refresh": True, + } + } + + def build(self, unique_id: str) -> str: + return unique_id diff --git a/src/backend/base/langflow/components/helpers/MemoryComponent.py b/src/backend/base/langflow/components/helpers/MemoryComponent.py new file mode 100644 index 000000000..670954b25 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/MemoryComponent.py @@ -0,0 +1,66 @@ +from typing import Optional + +from langflow.field_typing import Text +from langflow.helpers.record import records_to_text +from langflow.interface.custom.custom_component import CustomComponent +from langflow.memory import get_messages + + +class MemoryComponent(CustomComponent): + display_name = "Chat Memory" + description = "Retrieves stored chat messages given a specific Session ID." + beta: bool = True + icon = "history" + + def build_config(self): + return { + "sender": { + "options": ["Machine", "User", "Machine and User"], + "display_name": "Sender Type", + }, + "sender_name": {"display_name": "Sender Name", "advanced": True}, + "n_messages": { + "display_name": "Number of Messages", + "info": "Number of messages to retrieve.", + }, + "session_id": { + "display_name": "Session ID", + "info": "Session ID of the chat history.", + "input_types": ["Text"], + }, + "order": { + "options": ["Ascending", "Descending"], + "display_name": "Order", + "info": "Order of the messages.", + "advanced": True, + }, + "record_template": { + "display_name": "Record Template", + "multiline": True, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "advanced": True, + }, + } + + def build( + self, + sender: Optional[str] = "Machine and User", + sender_name: Optional[str] = None, + session_id: Optional[str] = None, + n_messages: int = 5, + order: Optional[str] = "Descending", + record_template: Optional[str] = "{sender_name}: {text}", + ) -> Text: + order = "DESC" if order == "Descending" else "ASC" + if sender == "Machine and User": + sender = None + messages = get_messages( + sender=sender, + sender_name=sender_name, + session_id=session_id, + limit=n_messages, + order=order, + ) + messages_str = records_to_text(template=record_template, records=messages) + self.status = messages_str + return messages_str diff --git a/src/backend/base/langflow/components/helpers/MessageHistory.py b/src/backend/base/langflow/components/helpers/MessageHistory.py new file mode 100644 index 000000000..0f208e6eb --- /dev/null +++ b/src/backend/base/langflow/components/helpers/MessageHistory.py @@ -0,0 +1,56 @@ +from typing import List, Optional + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.memory import get_messages +from langflow.schema import Record + + +class MessageHistoryComponent(CustomComponent): + display_name = "Message History" + description = "Retrieves stored chat messages given a specific Session ID." 
+ beta: bool = True + + def build_config(self): + return { + "sender": { + "options": ["Machine", "User", "Machine and User"], + "display_name": "Sender Type", + }, + "sender_name": {"display_name": "Sender Name", "advanced": True}, + "n_messages": { + "display_name": "Number of Messages", + "info": "Number of messages to retrieve.", + }, + "session_id": { + "display_name": "Session ID", + "info": "Session ID of the chat history.", + "input_types": ["Text"], + }, + "order": { + "options": ["Ascending", "Descending"], + "display_name": "Order", + "info": "Order of the messages.", + "advanced": True, + }, + } + + def build( + self, + sender: Optional[str] = "Machine and User", + sender_name: Optional[str] = None, + session_id: Optional[str] = None, + n_messages: int = 5, + order: Optional[str] = "Descending", + ) -> List[Record]: + order = "DESC" if order == "Descending" else "ASC" + if sender == "Machine and User": + sender = None + messages = get_messages( + sender=sender, + sender_name=sender_name, + session_id=session_id, + limit=n_messages, + order=order, + ) + self.status = messages + return messages diff --git a/src/backend/base/langflow/components/helpers/RecordsToText.py b/src/backend/base/langflow/components/helpers/RecordsToText.py new file mode 100644 index 000000000..8f4fed311 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/RecordsToText.py @@ -0,0 +1,35 @@ +from langflow.field_typing import Text +from langflow.helpers.record import records_to_text +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class RecordsToTextComponent(CustomComponent): + display_name = "Records To Text" + description = "Convert Records into plain text following a specified template." 
+ + def build_config(self): + return { + "records": { + "display_name": "Records", + "info": "The records to convert to text.", + }, + "template": { + "display_name": "Template", + "info": "The template to use for formatting the records. It can contain the keys {text}, {data} or any other key in the Record.", + }, + } + + def build( + self, + records: list[Record], + template: str = "Text: {text}\nData: {data}", + ) -> Text: + if not records: + return "" + if isinstance(records, Record): + records = [records] + + result_string = records_to_text(template, records) + self.status = result_string + return result_string diff --git a/src/backend/base/langflow/components/helpers/SplitText.py b/src/backend/base/langflow/components/helpers/SplitText.py new file mode 100644 index 000000000..a141bbddc --- /dev/null +++ b/src/backend/base/langflow/components/helpers/SplitText.py @@ -0,0 +1,85 @@ +from typing import Optional + +from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter +from langchain_core.documents import Document + +from langflow.field_typing import Text +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record +from langflow.utils.util import unescape_string + + +class SplitTextComponent(CustomComponent): + display_name: str = "Split Text" + description: str = "Split text into chunks of a specified length." + + def build_config(self): + return { + "inputs": { + "display_name": "Inputs", + "info": "Texts to split.", + "input_types": ["Record", "Text"], + }, + "separators": { + "display_name": "Separators", + "info": 'The characters to split on. 
Defaults to [" "].', + "is_list": True, + }, + "chunk_size": { + "display_name": "Max Chunk Size", + "info": "The maximum length (in number of characters) of each chunk.", + "field_type": "int", + "value": 1000, + }, + "chunk_overlap": { + "display_name": "Chunk Overlap", + "info": "The amount of character overlap between chunks.", + "field_type": "int", + "value": 200, + }, + "recursive": { + "display_name": "Recursive", + }, + "code": {"show": False}, + } + + def build( + self, + inputs: list[Text], + separators: Optional[list[str]] = [" "], + chunk_size: Optional[int] = 1000, + chunk_overlap: Optional[int] = 200, + recursive: bool = False, + ) -> list[Record]: + separators = [unescape_string(x) for x in separators] + + # Make sure chunk_size and chunk_overlap are ints + if isinstance(chunk_size, str): + chunk_size = int(chunk_size) + if isinstance(chunk_overlap, str): + chunk_overlap = int(chunk_overlap) + + if recursive: + splitter = RecursiveCharacterTextSplitter( + separators=separators, + chunk_size=chunk_size, + chunk_overlap=chunk_overlap, + ) + + else: + splitter = CharacterTextSplitter( + separator=separators[0], + chunk_size=chunk_size, + chunk_overlap=chunk_overlap, + ) + + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(Document(page_content=_input)) + + records = self.to_records(splitter.split_documents(documents)) + self.status = records + return records diff --git a/src/backend/base/langflow/components/helpers/UpdateRecord.py b/src/backend/base/langflow/components/helpers/UpdateRecord.py new file mode 100644 index 000000000..9f165e146 --- /dev/null +++ b/src/backend/base/langflow/components/helpers/UpdateRecord.py @@ -0,0 +1,39 @@ +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class UpdateRecordComponent(CustomComponent): + display_name = "Update Record" + description = "Update Record with 
text-based key/value pairs, similar to updating a Python dictionary." + + def build_config(self): + return { + "record": { + "display_name": "Record", + "info": "The record to update.", + }, + "new_data": { + "display_name": "New Data", + "info": "The new data to update the record with.", + "input_types": ["Text"], + }, + } + + def build( + self, + record: Record, + new_data: dict, + ) -> Record: + """ + Updates a record with new data. + + Args: + record (Record): The record to update. + new_data (dict): The new data to update the record with. + + Returns: + Record: The updated record. + """ + record.data.update(new_data) + self.status = record + return record diff --git a/src/backend/base/langflow/components/helpers/__init__.py b/src/backend/base/langflow/components/helpers/__init__.py new file mode 100644 index 000000000..af3524d8e --- /dev/null +++ b/src/backend/base/langflow/components/helpers/__init__.py @@ -0,0 +1,17 @@ +from .CreateRecord import CreateRecordComponent +from .CustomComponent import Component +from .DocumentToRecord import DocumentToRecordComponent +from .IDGenerator import UUIDGeneratorComponent +from .MessageHistory import MessageHistoryComponent +from .UpdateRecord import UpdateRecordComponent +from .RecordsToText import RecordsToTextComponent + +__all__ = [ + "Component", + "UpdateRecordComponent", + "DocumentToRecordComponent", + "UUIDGeneratorComponent", + "RecordsToTextComponent", + "CreateRecordComponent", + "MessageHistoryComponent", +] diff --git a/src/backend/base/langflow/components/inputs/ChatInput.py b/src/backend/base/langflow/components/inputs/ChatInput.py new file mode 100644 index 000000000..00eaab98c --- /dev/null +++ b/src/backend/base/langflow/components/inputs/ChatInput.py @@ -0,0 +1,37 @@ +from typing import Optional, Union + +from langflow.base.io.chat import ChatComponent +from langflow.field_typing import Text +from langflow.schema import Record + + +class ChatInput(ChatComponent): + display_name = "Chat Input" + 
description = "Get chat inputs from the Interaction Panel." + icon = "ChatInput" + + def build_config(self): + build_config = super().build_config() + build_config["input_value"] = { + "input_types": [], + "display_name": "Message", + "multiline": True, + } + + return build_config + + def build( + self, + sender: Optional[str] = "User", + sender_name: Optional[str] = "User", + input_value: Optional[str] = None, + session_id: Optional[str] = None, + return_record: Optional[bool] = False, + ) -> Union[Text, Record]: + return super().build( + sender=sender, + sender_name=sender_name, + input_value=input_value, + session_id=session_id, + return_record=return_record, + ) diff --git a/src/backend/base/langflow/components/inputs/Prompt.py b/src/backend/base/langflow/components/inputs/Prompt.py new file mode 100644 index 000000000..3b44a6d12 --- /dev/null +++ b/src/backend/base/langflow/components/inputs/Prompt.py @@ -0,0 +1,33 @@ +from langchain_core.prompts import PromptTemplate + +from langflow.field_typing import Prompt, TemplateField, Text +from langflow.interface.custom.custom_component import CustomComponent + + +class PromptComponent(CustomComponent): + display_name: str = "Prompt" + description: str = "Create a prompt template with dynamic variables." 
+ icon = "prompts" + + def build_config(self): + return { + "template": TemplateField(display_name="Template"), + "code": TemplateField(advanced=True), + } + + def build( + self, + template: Prompt, + **kwargs, + ) -> Text: + from langflow.base.prompts.utils import dict_values_to_string + + prompt_template = PromptTemplate.from_template(Text(template)) + kwargs = dict_values_to_string(kwargs) + kwargs = {k: "\n".join(v) if isinstance(v, list) else v for k, v in kwargs.items()} + try: + formated_prompt = prompt_template.format(**kwargs) + except Exception as exc: + raise ValueError(f"Error formatting prompt: {exc}") from exc + self.status = f'Prompt:\n"{formated_prompt}"' + return formated_prompt diff --git a/src/backend/base/langflow/components/inputs/TextInput.py b/src/backend/base/langflow/components/inputs/TextInput.py new file mode 100644 index 000000000..5408c03b2 --- /dev/null +++ b/src/backend/base/langflow/components/inputs/TextInput.py @@ -0,0 +1,32 @@ +from typing import Optional + +from langflow.base.io.text import TextComponent +from langflow.field_typing import Text + + +class TextInput(TextComponent): + display_name = "Text Input" + description = "Get text inputs from the Interaction Panel." + icon = "type" + + def build_config(self): + return { + "input_value": { + "display_name": "Value", + "input_types": ["Record", "Text"], + "info": "Text or Record to be passed as input.", + }, + "record_template": { + "display_name": "Record Template", + "multiline": True, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "advanced": True, + }, + } + + def build( + self, + input_value: Optional[str] = "", + record_template: Optional[str] = "", + ) -> Text: + return super().build(input_value=input_value, record_template=record_template) diff --git a/src/backend/base/langflow/components/inputs/__init__.py b/src/backend/base/langflow/components/inputs/__init__.py new file mode 100644 index 000000000..4dff479de --- /dev/null +++ b/src/backend/base/langflow/components/inputs/__init__.py @@ -0,0 +1,5 @@ +from .ChatInput import ChatInput +from .Prompt import PromptComponent +from .TextInput import TextInput + +__all__ = ["ChatInput", "PromptComponent", "TextInput"] diff --git a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py similarity index 94% rename from src/backend/langflow/components/utilities/BingSearchAPIWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py index b9dc4a2ef..23eebee10 100644 --- a/src/backend/langflow/components/utilities/BingSearchAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/BingSearchAPIWrapper.py @@ -1,10 +1,10 @@ -from langflow import CustomComponent - # Assuming `BingSearchAPIWrapper` is a class that exists in the context # and has the appropriate methods and attributes. # We need to make sure this class is importable from the context where this code will be running. 
from langchain_community.utilities.bing_search import BingSearchAPIWrapper +from langflow.interface.custom.custom_component import CustomComponent + class BingSearchAPIWrapperComponent(CustomComponent): display_name = "BingSearchAPIWrapper" diff --git a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py similarity index 91% rename from src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py index 1d7123dc3..3f2f67faf 100644 --- a/src/backend/langflow/components/utilities/GoogleSearchAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/GoogleSearchAPIWrapper.py @@ -1,7 +1,8 @@ from typing import Callable, Union from langchain_community.utilities.google_search import GoogleSearchAPIWrapper -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent class GoogleSearchAPIWrapperComponent(CustomComponent): diff --git a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py similarity index 83% rename from src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py index be5efe738..e70e85cf4 100644 --- a/src/backend/langflow/components/utilities/GoogleSerperAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/GoogleSerperAPIWrapper.py @@ -3,7 +3,8 @@ from typing import Dict # Assuming the existence of GoogleSerperAPIWrapper class in the serper module # If this class does not exist, you would need to create it or import the appropriate class from another module from langchain_community.utilities.google_serper import GoogleSerperAPIWrapper -from langflow import CustomComponent 
+ +from langflow.interface.custom.custom_component import CustomComponent class GoogleSerperAPIWrapperComponent(CustomComponent): @@ -17,20 +18,23 @@ class GoogleSerperAPIWrapperComponent(CustomComponent): "show": True, "multiline": False, "password": False, - "name": "result_key_for_type", "advanced": False, "dynamic": False, "info": "", "field_type": "dict", "list": False, - "value": {"news": "news", "places": "places", "images": "images", "search": "organic"}, + "value": { + "news": "news", + "places": "places", + "images": "images", + "search": "organic", + }, }, "serper_api_key": { "display_name": "Serper API Key", "show": True, "multiline": False, "password": True, - "name": "serper_api_key", "advanced": False, "dynamic": False, "info": "", diff --git a/src/backend/langflow/components/utilities/JSONDocumentBuilder.py b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py similarity index 95% rename from src/backend/langflow/components/utilities/JSONDocumentBuilder.py rename to src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py index 33cc73d73..df783470a 100644 --- a/src/backend/langflow/components/utilities/JSONDocumentBuilder.py +++ b/src/backend/base/langflow/components/langchain_utilities/JSONDocumentBuilder.py @@ -12,7 +12,8 @@ # - **Document:** The Document containing the JSON object. from langchain_core.documents import Document -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent from langflow.services.database.models.base import orjson_dumps @@ -20,7 +21,6 @@ class JSONDocumentBuilder(CustomComponent): display_name: str = "JSON Document Builder" description: str = "Build a Document containing a JSON object using a key and another Document page content." 
output_types: list[str] = ["Document"] - beta = True documentation: str = "https://docs.langflow.org/components/utilities#json-document-builder" field_config = { diff --git a/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py new file mode 100644 index 000000000..7bbbdb870 --- /dev/null +++ b/src/backend/base/langflow/components/langchain_utilities/SQLDatabase.py @@ -0,0 +1,22 @@ +from langchain_experimental.sql.base import SQLDatabase + +from langflow.interface.custom.custom_component import CustomComponent + + +class SQLDatabaseComponent(CustomComponent): + display_name = "SQLDatabase" + description = "SQL Database" + + def build_config(self): + return { + "uri": {"display_name": "URI", "info": "URI to the database."}, + } + + def clean_up_uri(self, uri: str) -> str: + if uri.startswith("postgresql://"): + uri = uri.replace("postgresql://", "postgres://") + return uri.strip() + + def build(self, uri: str) -> SQLDatabase: + uri = self.clean_up_uri(uri) + return SQLDatabase.from_uri(uri) diff --git a/src/backend/langflow/components/tools/SearchApi.py b/src/backend/base/langflow/components/langchain_utilities/SearchApi.py similarity index 87% rename from src/backend/langflow/components/tools/SearchApi.py rename to src/backend/base/langflow/components/langchain_utilities/SearchApi.py index 53da6c741..3dcd48d9f 100644 --- a/src/backend/langflow/components/tools/SearchApi.py +++ b/src/backend/base/langflow/components/langchain_utilities/SearchApi.py @@ -1,9 +1,11 @@ -from langflow import CustomComponent -from langchain.schema import Document -from langflow.services.database.models.base import orjson_dumps -from langchain_community.utilities.searchapi import SearchApiAPIWrapper from typing import Optional +from langchain_community.utilities.searchapi import SearchApiAPIWrapper + +from langflow.custom import CustomComponent +from langflow.schema.schema import Record +from 
langflow.services.database.models.base import orjson_dumps + class SearchApi(CustomComponent): display_name: str = "SearchApi" @@ -35,7 +37,7 @@ class SearchApi(CustomComponent): engine: str, api_key: str, params: Optional[dict] = None, - ) -> Document: + ) -> Record: if params is None: params = {} @@ -46,6 +48,6 @@ class SearchApi(CustomComponent): result = orjson_dumps(results, indent_2=False) - document = Document(page_content=result) - - return document + record = Record(data=result) + self.status = record + return record diff --git a/src/backend/langflow/components/utilities/SearxSearchWrapper.py b/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py similarity index 90% rename from src/backend/langflow/components/utilities/SearxSearchWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py index b406f3882..d255f34b7 100644 --- a/src/backend/langflow/components/utilities/SearxSearchWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/SearxSearchWrapper.py @@ -1,7 +1,9 @@ -from langflow import CustomComponent -from typing import Optional, Dict +from typing import Dict, Optional + from langchain_community.utilities.searx_search import SearxSearchWrapper +from langflow.interface.custom.custom_component import CustomComponent + class SearxSearchWrapperComponent(CustomComponent): display_name = "SearxSearchWrapper" diff --git a/src/backend/langflow/components/utilities/SerpAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py similarity index 93% rename from src/backend/langflow/components/utilities/SerpAPIWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py index 924f5628d..c64a26e79 100644 --- a/src/backend/langflow/components/utilities/SerpAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/SerpAPIWrapper.py @@ -1,7 +1,8 @@ from typing import Callable, Union from 
langchain_community.utilities.serpapi import SerpAPIWrapper -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent class SerpAPIWrapperComponent(CustomComponent): diff --git a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py similarity index 93% rename from src/backend/langflow/components/utilities/WikipediaAPIWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py index 00820881b..144792315 100644 --- a/src/backend/langflow/components/utilities/WikipediaAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/WikipediaAPIWrapper.py @@ -1,7 +1,8 @@ from typing import Callable, Union from langchain_community.utilities.wikipedia import WikipediaAPIWrapper -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent # Assuming WikipediaAPIWrapper is a class that needs to be imported. 
# The import statement is not included as it is not provided in the JSON diff --git a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py b/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py similarity index 90% rename from src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py rename to src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py index 2e71a161c..bc224e83c 100644 --- a/src/backend/langflow/components/utilities/WolframAlphaAPIWrapper.py +++ b/src/backend/base/langflow/components/langchain_utilities/WolframAlphaAPIWrapper.py @@ -1,7 +1,8 @@ from typing import Callable, Union from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent # Since all the fields in the JSON have show=False, we will only create a basic component # without any configurable fields. diff --git a/src/backend/langflow/interface/vector_store/__init__.py b/src/backend/base/langflow/components/memories/__init__.py similarity index 100% rename from src/backend/langflow/interface/vector_store/__init__.py rename to src/backend/base/langflow/components/memories/__init__.py diff --git a/src/backend/langflow/components/llms/AmazonBedrock.py b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py similarity index 93% rename from src/backend/langflow/components/llms/AmazonBedrock.py rename to src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py index 5b931bb94..1a18f5c08 100644 --- a/src/backend/langflow/components/llms/AmazonBedrock.py +++ b/src/backend/base/langflow/components/model_specs/AmazonBedrockSpecs.py @@ -1,13 +1,15 @@ from typing import Optional from langchain.llms.base import BaseLLM -from langchain.llms.bedrock import Bedrock -from langflow import CustomComponent +from langchain_community.llms.bedrock import Bedrock + +from 
langflow.interface.custom.custom_component import CustomComponent class AmazonBedrockComponent(CustomComponent): display_name: str = "Amazon Bedrock" description: str = "LLM model from Amazon Bedrock." + icon = "Amazon" def build_config(self): return { diff --git a/src/backend/langflow/components/llms/AnthropicLLM.py b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py similarity index 96% rename from src/backend/langflow/components/llms/AnthropicLLM.py rename to src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py index 94c4ed8f5..2ea78162b 100644 --- a/src/backend/langflow/components/llms/AnthropicLLM.py +++ b/src/backend/base/langflow/components/model_specs/AnthropicLLMSpecs.py @@ -1,15 +1,16 @@ from typing import Optional -from langchain_community.chat_models.anthropic import ChatAnthropic from langchain.llms.base import BaseLanguageModel +from langchain_community.chat_models.anthropic import ChatAnthropic from pydantic.v1 import SecretStr -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent class AnthropicLLM(CustomComponent): display_name: str = "AnthropicLLM" description: str = "Anthropic Chat&Completion large language models." 
+ icon = "Anthropic" def build_config(self): return { diff --git a/src/backend/langflow/components/llms/Anthropic.py b/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py similarity index 93% rename from src/backend/langflow/components/llms/Anthropic.py rename to src/backend/base/langflow/components/model_specs/AnthropicSpecs.py index c1b776617..23c284888 100644 --- a/src/backend/langflow/components/llms/Anthropic.py +++ b/src/backend/base/langflow/components/model_specs/AnthropicSpecs.py @@ -3,13 +3,14 @@ from typing import Optional from langchain_community.llms.anthropic import Anthropic from pydantic.v1 import SecretStr -from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, NestedDict +from langflow.interface.custom.custom_component import CustomComponent class AnthropicComponent(CustomComponent): display_name = "Anthropic" description = "Anthropic large language models." + icon = "Anthropic" def build_config(self): return { diff --git a/src/backend/langflow/components/llms/AzureChatOpenAI.py b/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py similarity index 95% rename from src/backend/langflow/components/llms/AzureChatOpenAI.py rename to src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py index d43abb3dc..6f468bbed 100644 --- a/src/backend/langflow/components/llms/AzureChatOpenAI.py +++ b/src/backend/base/langflow/components/model_specs/AzureChatOpenAISpecs.py @@ -1,14 +1,17 @@ from typing import Optional -from langflow import CustomComponent + from langchain.llms.base import BaseLanguageModel from langchain_community.chat_models.azure_openai import AzureChatOpenAI +from langflow.interface.custom.custom_component import CustomComponent -class AzureChatOpenAIComponent(CustomComponent): + +class AzureChatOpenAISpecsComponent(CustomComponent): display_name: str = "AzureChatOpenAI" description: str = "LLM model from Azure OpenAI." 
documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai" beta = False + icon = "Azure" AZURE_OPENAI_MODELS = [ "gpt-35-turbo", diff --git a/src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py similarity index 98% rename from src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py rename to src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py index fd7341e15..9c39acdf6 100644 --- a/src/backend/langflow/components/llms/BaiduQianfanChatEndpoints.py +++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanChatEndpointsSpecs.py @@ -1,10 +1,10 @@ from typing import Optional -from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint from langchain.llms.base import BaseLLM +from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint from pydantic.v1 import SecretStr -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent class QianfanChatEndpointComponent(CustomComponent): diff --git a/src/backend/langflow/components/llms/BaiduQianfanLLMEndpoints.py b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py similarity index 97% rename from src/backend/langflow/components/llms/BaiduQianfanLLMEndpoints.py rename to src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py index 786c4516b..acbf8ba28 100644 --- a/src/backend/langflow/components/llms/BaiduQianfanLLMEndpoints.py +++ b/src/backend/base/langflow/components/model_specs/BaiduQianfanLLMEndpointsSpecs.py @@ -1,8 +1,10 @@ from typing import Optional -from langflow import CustomComponent + from langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint from langchain.llms.base import BaseLLM +from langflow.interface.custom.custom_component import CustomComponent + class 
QianfanLLMEndpointComponent(CustomComponent): display_name: str = "QianfanLLMEndpoint" diff --git a/src/backend/langflow/components/llms/ChatAnthropic.py b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py similarity index 94% rename from src/backend/langflow/components/llms/ChatAnthropic.py rename to src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py index 02866d366..a4a37b283 100644 --- a/src/backend/langflow/components/llms/ChatAnthropic.py +++ b/src/backend/base/langflow/components/model_specs/ChatAnthropicSpecs.py @@ -1,9 +1,9 @@ from typing import Callable, Optional, Union -from langchain_anthropic import ChatAnthropic +from langchain_community.chat_models.anthropic import ChatAnthropic from pydantic.v1.types import SecretStr -from langflow import CustomComponent +from langflow.custom import CustomComponent from langflow.field_typing import BaseLanguageModel @@ -11,6 +11,7 @@ class ChatAnthropicComponent(CustomComponent): display_name = "ChatAnthropic" description = "`Anthropic` chat large language models." 
documentation = "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic" + icon = "Anthropic" def build_config(self): return { diff --git a/src/backend/langflow/components/llms/ChatLiteLLM.py b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py similarity index 72% rename from src/backend/langflow/components/llms/ChatLiteLLM.py rename to src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py index f2b7f7dc3..439266fbb 100644 --- a/src/backend/langflow/components/llms/ChatLiteLLM.py +++ b/src/backend/base/langflow/components/model_specs/ChatLiteLLMSpecs.py @@ -1,9 +1,8 @@ -import os from typing import Any, Callable, Dict, Optional, Union from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException -from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel +from langflow.interface.custom.custom_component import CustomComponent class ChatLiteLLMComponent(CustomComponent): @@ -27,6 +26,18 @@ class ChatLiteLLMComponent(CustomComponent): "required": False, "password": True, }, + "provider": { + "display_name": "Provider", + "info": "The provider of the API key.", + "options": [ + "OpenAI", + "Azure", + "Anthropic", + "Replicate", + "Cohere", + "OpenRouter", + ], + }, "streaming": { "display_name": "Streaming", "field_type": "bool", @@ -96,7 +107,8 @@ class ChatLiteLLMComponent(CustomComponent): def build( self, model: str, - api_key: str, + provider: str, + api_key: Optional[str] = None, streaming: bool = True, temperature: Optional[float] = 0.7, model_kwargs: Optional[Dict[str, Any]] = {}, @@ -106,7 +118,7 @@ class ChatLiteLLMComponent(CustomComponent): max_tokens: int = 256, max_retries: int = 6, verbose: bool = False, - ) -> Union[BaseLanguageModel, Callable]: + ) -> BaseLanguageModel: try: import litellm # type: ignore @@ -116,11 +128,23 @@ class ChatLiteLLMComponent(CustomComponent): raise ChatLiteLLMException( "Could not import litellm python 
package. " "Please install it with `pip install litellm`" ) - if api_key: - if "perplexity" in model: - os.environ["PERPLEXITYAI_API_KEY"] = api_key - elif "replicate" in model: - os.environ["REPLICATE_API_KEY"] = api_key + provider_map = { + "OpenAI": "openai_api_key", + "Azure": "azure_api_key", + "Anthropic": "anthropic_api_key", + "Replicate": "replicate_api_key", + "Cohere": "cohere_api_key", + "OpenRouter": "openrouter_api_key", + } + # Set the API key based on the provider + api_keys: dict[str, Optional[str]] = {v: None for v in provider_map.values()} + + if variable_name := provider_map.get(provider): + api_keys[variable_name] = api_key + else: + raise ChatLiteLLMException( + f"Provider {provider} is not supported. Supported providers are: {', '.join(provider_map.keys())}" + ) LLM = ChatLiteLLM( model=model, @@ -133,5 +157,11 @@ class ChatLiteLLMComponent(CustomComponent): n=n, max_tokens=max_tokens, max_retries=max_retries, + openai_api_key=api_keys["openai_api_key"], + azure_api_key=api_keys["azure_api_key"], + anthropic_api_key=api_keys["anthropic_api_key"], + replicate_api_key=api_keys["replicate_api_key"], + cohere_api_key=api_keys["cohere_api_key"], + openrouter_api_key=api_keys["openrouter_api_key"], ) return LLM diff --git a/src/backend/langflow/components/llms/ChatOllamaEndpoint.py b/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py similarity index 98% rename from src/backend/langflow/components/llms/ChatOllamaEndpoint.py rename to src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py index 34e9e0bd7..6afde420c 100644 --- a/src/backend/langflow/components/llms/ChatOllamaEndpoint.py +++ b/src/backend/base/langflow/components/model_specs/ChatOllamaEndpointSpecs.py @@ -5,9 +5,8 @@ from langchain_community.chat_models import ChatOllama from langchain_core.language_models.chat_models import BaseChatModel # from langchain.chat_models import ChatOllama -from langflow import CustomComponent +from 
langflow.interface.custom.custom_component import CustomComponent -# whe When a callback component is added to Langflow, the comment must be uncommented. # from langchain.callbacks.manager import CallbackManager diff --git a/src/backend/langflow/components/llms/ChatOpenAI.py b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py similarity index 90% rename from src/backend/langflow/components/llms/ChatOpenAI.py rename to src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py index 9f9d61ef8..fcaf80965 100644 --- a/src/backend/langflow/components/llms/ChatOpenAI.py +++ b/src/backend/base/langflow/components/model_specs/ChatOpenAISpecs.py @@ -3,31 +3,29 @@ from typing import Optional, Union from langchain.llms import BaseLLM from langchain_community.chat_models.openai import ChatOpenAI -from langflow import CustomComponent from langflow.field_typing import BaseLanguageModel, NestedDict +from langflow.interface.custom.custom_component import CustomComponent class ChatOpenAIComponent(CustomComponent): display_name = "ChatOpenAI" description = "`OpenAI` Chat large language models API." 
+ icon = "OpenAI" def build_config(self): return { "max_tokens": { "display_name": "Max Tokens", - "field_type": "int", "advanced": False, "required": False, }, "model_kwargs": { "display_name": "Model Kwargs", - "field_type": "NestedDict", "advanced": True, "required": False, }, "model_name": { "display_name": "Model Name", - "field_type": "str", "advanced": False, "required": False, "options": [ @@ -41,7 +39,6 @@ class ChatOpenAIComponent(CustomComponent): }, "openai_api_base": { "display_name": "OpenAI API Base", - "field_type": "str", "advanced": False, "required": False, "info": ( @@ -51,14 +48,12 @@ class ChatOpenAIComponent(CustomComponent): }, "openai_api_key": { "display_name": "OpenAI API Key", - "field_type": "str", "advanced": False, "required": False, "password": True, }, "temperature": { "display_name": "Temperature", - "field_type": "float", "advanced": False, "required": False, "value": 0.7, diff --git a/src/backend/langflow/components/llms/ChatVertexAI.py b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py similarity index 96% rename from src/backend/langflow/components/llms/ChatVertexAI.py rename to src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py index 66235a1d8..fd8b5b427 100644 --- a/src/backend/langflow/components/llms/ChatVertexAI.py +++ b/src/backend/base/langflow/components/model_specs/ChatVertexAISpecs.py @@ -3,13 +3,15 @@ from typing import List, Optional, Union from langchain.llms import BaseLLM from langchain_community.chat_models.vertexai import ChatVertexAI from langchain_core.messages.base import BaseMessage -from langflow import CustomComponent + from langflow.field_typing import BaseLanguageModel +from langflow.interface.custom.custom_component import CustomComponent class ChatVertexAIComponent(CustomComponent): display_name = "ChatVertexAI" description = "`Vertex AI` Chat large language models API." 
+ icon = "VertexAI" def build_config(self): return { diff --git a/src/backend/langflow/components/llms/Cohere.py b/src/backend/base/langflow/components/model_specs/CohereSpecs.py similarity index 91% rename from src/backend/langflow/components/llms/Cohere.py rename to src/backend/base/langflow/components/model_specs/CohereSpecs.py index 3b74fc9b4..eeda381a4 100644 --- a/src/backend/langflow/components/llms/Cohere.py +++ b/src/backend/base/langflow/components/model_specs/CohereSpecs.py @@ -1,12 +1,14 @@ from langchain_community.llms.cohere import Cohere from langchain_core.language_models.base import BaseLanguageModel -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent class CohereComponent(CustomComponent): display_name = "Cohere" description = "Cohere large language models." documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere" + icon = "Cohere" def build_config(self): return { diff --git a/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py b/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py new file mode 100644 index 000000000..8a3894292 --- /dev/null +++ b/src/backend/base/langflow/components/model_specs/GoogleGenerativeAISpecs.py @@ -0,0 +1,74 @@ +from typing import Optional + +from langchain_google_genai import ChatGoogleGenerativeAI # type: ignore +from pydantic.v1.types import SecretStr + +from langflow.field_typing import BaseLanguageModel, RangeSpec +from langflow.interface.custom.custom_component import CustomComponent + + +class GoogleGenerativeAIComponent(CustomComponent): + display_name: str = "Google Generative AI" + description: str = "A component that uses Google Generative AI to generate text." 
+ documentation: str = "http://docs.langflow.org/components/custom" + icon = "Google" + + def build_config(self): + return { + "google_api_key": { + "display_name": "Google API Key", + "info": "The Google API Key to use for the Google Generative AI.", + }, + "max_output_tokens": { + "display_name": "Max Output Tokens", + "info": "The maximum number of tokens to generate.", + }, + "temperature": { + "display_name": "Temperature", + "info": "Run inference with this temperature. Must by in the closed interval [0.0, 1.0].", + }, + "top_k": { + "display_name": "Top K", + "info": "Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", + "rangeSpec": RangeSpec(min=0, max=2, step=0.1), + "advanced": True, + }, + "top_p": { + "display_name": "Top P", + "info": "The maximum cumulative probability of tokens to consider when sampling.", + "advanced": True, + }, + "n": { + "display_name": "N", + "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.", + "advanced": True, + }, + "model": { + "display_name": "Model", + "info": "The name of the model to use. 
Supported examples: gemini-pro", + "options": ["gemini-pro", "gemini-pro-vision"], + }, + "code": { + "advanced": True, + }, + } + + def build( + self, + google_api_key: str, + model: str, + max_output_tokens: Optional[int] = None, + temperature: float = 0.1, + top_k: Optional[int] = None, + top_p: Optional[float] = None, + n: Optional[int] = 1, + ) -> BaseLanguageModel: + return ChatGoogleGenerativeAI( + model=model, + max_output_tokens=max_output_tokens or None, # type: ignore + temperature=temperature, + top_k=top_k or None, + top_p=top_p or None, # type: ignore + n=n or 1, + google_api_key=SecretStr(google_api_key), + ) diff --git a/src/backend/langflow/components/llms/HuggingFaceEndpoints.py b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py similarity index 93% rename from src/backend/langflow/components/llms/HuggingFaceEndpoints.py rename to src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py index 579e3c0b3..c3e74b1bd 100644 --- a/src/backend/langflow/components/llms/HuggingFaceEndpoints.py +++ b/src/backend/base/langflow/components/model_specs/HuggingFaceEndpointsSpecs.py @@ -2,12 +2,14 @@ from typing import Optional from langchain.llms.base import BaseLLM from langchain.llms.huggingface_endpoint import HuggingFaceEndpoint -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent class HuggingFaceEndpointsComponent(CustomComponent): display_name: str = "Hugging Face Inference API" description: str = "LLM model from Hugging Face Inference API." 
+ icon = "HuggingFace" def build_config(self): return { diff --git a/src/backend/langflow/components/llms/OllamaLLM.py b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py similarity index 98% rename from src/backend/langflow/components/llms/OllamaLLM.py rename to src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py index eb5c52975..8d94467db 100644 --- a/src/backend/langflow/components/llms/OllamaLLM.py +++ b/src/backend/base/langflow/components/model_specs/OllamaLLMSpecs.py @@ -3,7 +3,7 @@ from typing import List, Optional from langchain.llms.base import BaseLLM from langchain_community.llms.ollama import Ollama -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent class OllamaLLM(CustomComponent): diff --git a/src/backend/langflow/components/llms/VertexAI.py b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py similarity index 96% rename from src/backend/langflow/components/llms/VertexAI.py rename to src/backend/base/langflow/components/model_specs/VertexAISpecs.py index 008451bc8..f186f2ed0 100644 --- a/src/backend/langflow/components/llms/VertexAI.py +++ b/src/backend/base/langflow/components/model_specs/VertexAISpecs.py @@ -1,12 +1,15 @@ -from langflow import CustomComponent +from typing import Callable, Dict, Optional, Union + from langchain.llms import BaseLLM -from typing import Optional, Union, Callable, Dict from langchain_community.llms.vertexai import VertexAI +from langflow.interface.custom.custom_component import CustomComponent + class VertexAIComponent(CustomComponent): display_name = "VertexAI" description = "Google Vertex AI large language models" + icon = "VertexAI" def build_config(self): return { diff --git a/src/backend/base/langflow/components/model_specs/__init__.py b/src/backend/base/langflow/components/model_specs/__init__.py new file mode 100644 index 000000000..7bf6c2881 --- /dev/null +++ 
b/src/backend/base/langflow/components/model_specs/__init__.py @@ -0,0 +1,35 @@ +from .AmazonBedrockSpecs import AmazonBedrockComponent +from .AnthropicLLMSpecs import AnthropicLLM +from .AnthropicSpecs import AnthropicComponent +from .AzureChatOpenAISpecs import AzureChatOpenAISpecsComponent +from .BaiduQianfanChatEndpointsSpecs import QianfanChatEndpointComponent +from .BaiduQianfanLLMEndpointsSpecs import QianfanLLMEndpointComponent +from .ChatAnthropicSpecs import ChatAnthropicComponent +from .ChatLiteLLMSpecs import ChatLiteLLMComponent +from .ChatOllamaEndpointSpecs import ChatOllamaComponent +from .ChatOpenAISpecs import ChatOpenAIComponent +from .ChatVertexAISpecs import ChatVertexAIComponent +from .CohereSpecs import CohereComponent +from .GoogleGenerativeAISpecs import GoogleGenerativeAIComponent +from .HuggingFaceEndpointsSpecs import HuggingFaceEndpointsComponent +from .OllamaLLMSpecs import OllamaLLM +from .VertexAISpecs import VertexAIComponent + +__all__ = [ + "AmazonBedrockComponent", + "AnthropicLLM", + "AnthropicComponent", + "AzureChatOpenAISpecsComponent", + "QianfanChatEndpointComponent", + "QianfanLLMEndpointComponent", + "ChatAnthropicComponent", + "ChatLiteLLMComponent", + "ChatOllamaComponent", + "ChatOpenAIComponent", + "ChatVertexAIComponent", + "CohereComponent", + "GoogleGenerativeAIComponent", + "HuggingFaceEndpointsComponent", + "OllamaLLM", + "VertexAIComponent", +] diff --git a/src/backend/base/langflow/components/models/AmazonBedrockModel.py b/src/backend/base/langflow/components/models/AmazonBedrockModel.py new file mode 100644 index 000000000..1015f1684 --- /dev/null +++ b/src/backend/base/langflow/components/models/AmazonBedrockModel.py @@ -0,0 +1,99 @@ +from typing import Optional + +from langchain_community.chat_models.bedrock import BedrockChat + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Text + + +class 
AmazonBedrockComponent(LCModelComponent): + display_name: str = "Amazon Bedrock" + description: str = "Generate text using Amazon Bedrock LLMs." + icon = "Amazon" + field_order = [ + "model_id", + "credentials_profile_name", + "region_name", + "model_kwargs", + "endpoint_url", + "cache", + "stream", + "input_value", + "system_message", + ] + + def build_config(self): + return { + "model_id": { + "display_name": "Model Id", + "options": [ + "amazon.titan-text-express-v1", + "amazon.titan-text-lite-v1", + "amazon.titan-embed-text-v1", + "amazon.titan-embed-image-v1", + "amazon.titan-image-generator-v1", + "anthropic.claude-v2", + "anthropic.claude-v2:1", + "anthropic.claude-3-sonnet-20240229-v1:0", + "anthropic.claude-3-haiku-20240307-v1:0", + "anthropic.claude-instant-v1", + "ai21.j2-mid-v1", + "ai21.j2-ultra-v1", + "cohere.command-text-v14", + "cohere.command-light-text-v14", + "cohere.embed-english-v3", + "cohere.embed-multilingual-v3", + "meta.llama2-13b-chat-v1", + "meta.llama2-70b-chat-v1", + "mistral.mistral-7b-instruct-v0:2", + "mistral.mixtral-8x7b-instruct-v0:1", + ], + }, + "credentials_profile_name": {"display_name": "Credentials Profile Name"}, + "endpoint_url": {"display_name": "Endpoint URL"}, + "region_name": {"display_name": "Region Name"}, + "model_kwargs": { + "display_name": "Model Kwargs", + "advanced": True, + }, + "cache": {"display_name": "Cache"}, + "input_value": {"display_name": "Input"}, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + system_message: Optional[str] = None, + model_id: str = "anthropic.claude-instant-v1", + credentials_profile_name: Optional[str] = None, + region_name: Optional[str] = None, + model_kwargs: Optional[dict] = None, + endpoint_url: Optional[str] = None, + cache: Optional[bool] = None, + 
stream: bool = False, + ) -> Text: + try: + output = BedrockChat( + credentials_profile_name=credentials_profile_name, + model_id=model_id, + region_name=region_name, + model_kwargs=model_kwargs, + endpoint_url=endpoint_url, + streaming=stream, + cache=cache, + ) # type: ignore + except Exception as e: + raise ValueError("Could not connect to AmazonBedrock API.") from e + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/AnthropicModel.py b/src/backend/base/langflow/components/models/AnthropicModel.py new file mode 100644 index 000000000..1e5a7aec5 --- /dev/null +++ b/src/backend/base/langflow/components/models/AnthropicModel.py @@ -0,0 +1,106 @@ +from typing import Optional + +from langchain_anthropic.chat_models import ChatAnthropic +from pydantic.v1 import SecretStr + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Text + + +class AnthropicLLM(LCModelComponent): + display_name: str = "Anthropic" + description: str = "Generate text using Anthropic Chat&Completion LLMs." 
+ icon = "Anthropic" + + field_order = [ + "model", + "anthropic_api_key", + "max_tokens", + "temperature", + "anthropic_api_url", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "model": { + "display_name": "Model Name", + "options": [ + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + "claude-2.1", + "claude-2.0", + "claude-instant-1.2", + "claude-instant-1", + ], + "info": "https://python.langchain.com/docs/integrations/chat/anthropic", + "required": True, + "value": "claude-3-opus-20240229", + }, + "anthropic_api_key": { + "display_name": "Anthropic API Key", + "required": True, + "password": True, + "info": "Your Anthropic API key.", + }, + "max_tokens": { + "display_name": "Max Tokens", + "field_type": "int", + "advanced": True, + "value": 256, + }, + "temperature": { + "display_name": "Temperature", + "field_type": "float", + "value": 0.1, + }, + "anthropic_api_url": { + "display_name": "Anthropic API URL", + "advanced": True, + "info": "Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.", + }, + "code": {"show": False}, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "advanced": True, + "info": STREAM_INFO_TEXT, + }, + "system_message": { + "display_name": "System Message", + "advanced": True, + "info": "System message to pass to the model.", + }, + } + + def build( + self, + model: str, + input_value: Text, + system_message: Optional[str] = None, + anthropic_api_key: Optional[str] = None, + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + anthropic_api_url: Optional[str] = None, + stream: bool = False, + ) -> Text: + # Set default API endpoint if not provided + if not anthropic_api_url: + anthropic_api_url = "https://api.anthropic.com" + + try: + output = ChatAnthropic( + model_name=model, + anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None), + max_tokens_to_sample=max_tokens, # type: ignore + temperature=temperature, + anthropic_api_url=anthropic_api_url, + ) + except Exception as e: + raise ValueError("Could not connect to Anthropic API.") from e + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/AzureOpenAIModel.py b/src/backend/base/langflow/components/models/AzureOpenAIModel.py new file mode 100644 index 000000000..9e1d32bb7 --- /dev/null +++ b/src/backend/base/langflow/components/models/AzureOpenAIModel.py @@ -0,0 +1,121 @@ +from typing import Optional + +from langchain.llms.base import BaseLanguageModel +from langchain_openai import AzureChatOpenAI + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Text + + +class AzureChatOpenAIComponent(LCModelComponent): + display_name: str = "Azure OpenAI" + description: str = "Generate text using Azure OpenAI LLMs." 
+ documentation: str = "https://python.langchain.com/docs/integrations/llms/azure_openai" + beta = False + icon = "Azure" + + field_order = [ + "model", + "azure_endpoint", + "azure_deployment", + "api_version", + "api_key", + "temperature", + "max_tokens", + "input_value", + "system_message", + "stream", + ] + + AZURE_OPENAI_MODELS = [ + "gpt-35-turbo", + "gpt-35-turbo-16k", + "gpt-35-turbo-instruct", + "gpt-4", + "gpt-4-32k", + "gpt-4-vision", + ] + + AZURE_OPENAI_API_VERSIONS = [ + "2023-03-15-preview", + "2023-05-15", + "2023-06-01-preview", + "2023-07-01-preview", + "2023-08-01-preview", + "2023-09-01-preview", + "2023-12-01-preview", + ] + + def build_config(self): + return { + "model": { + "display_name": "Model Name", + "value": self.AZURE_OPENAI_MODELS[0], + "options": self.AZURE_OPENAI_MODELS, + }, + "azure_endpoint": { + "display_name": "Azure Endpoint", + "info": "Your Azure endpoint, including the resource.. Example: `https://example-resource.azure.openai.com/`", + }, + "azure_deployment": { + "display_name": "Deployment Name", + }, + "api_version": { + "display_name": "API Version", + "options": self.AZURE_OPENAI_API_VERSIONS, + "value": self.AZURE_OPENAI_API_VERSIONS[-1], + "advanced": True, + }, + "api_key": {"display_name": "API Key", "password": True}, + "temperature": { + "display_name": "Temperature", + "value": 0.7, + }, + "max_tokens": { + "display_name": "Max Tokens", + "value": 1000, + "advanced": True, + "info": "Maximum number of tokens to generate.", + }, + "code": {"show": False}, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + model: str, + azure_endpoint: str, + input_value: Text, + azure_deployment: str, + api_version: str, + api_key: str, + temperature: float, + system_message: 
Optional[str] = None, + max_tokens: Optional[int] = 1000, + stream: bool = False, + ) -> Text: + try: + output = AzureChatOpenAI( + model=model, + azure_endpoint=azure_endpoint, + azure_deployment=azure_deployment, + api_version=api_version, + api_key=api_key, + temperature=temperature, + max_tokens=max_tokens, + ) + except Exception as e: + raise ValueError("Could not connect to AzureOpenAI API.") from e + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py new file mode 100644 index 000000000..f5e6497d0 --- /dev/null +++ b/src/backend/base/langflow/components/models/BaiduQianfanChatModel.py @@ -0,0 +1,123 @@ +from typing import Optional + +from langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint +from pydantic.v1 import SecretStr + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Text + + +class QianfanChatEndpointComponent(LCModelComponent): + display_name: str = "Qianfan" + description: str = "Generate text using Baidu Qianfan LLMs." + documentation: str = "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint"
+ icon = "BaiduQianfan" + + field_order = [ + "model", + "qianfan_ak", + "qianfan_sk", + "top_p", + "temperature", + "penalty_score", + "endpoint", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "model": { + "display_name": "Model Name", + "options": [ + "ERNIE-Bot", + "ERNIE-Bot-turbo", + "BLOOMZ-7B", + "Llama-2-7b-chat", + "Llama-2-13b-chat", + "Llama-2-70b-chat", + "Qianfan-BLOOMZ-7B-compressed", + "Qianfan-Chinese-Llama-2-7B", + "ChatGLM2-6B-32K", + "AquilaChat-7B", + ], + "info": "https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint", + "value": "ERNIE-Bot-turbo", + }, + "qianfan_ak": { + "display_name": "Qianfan Ak", + "password": True, + "info": "which you could get from https://cloud.baidu.com/product/wenxinworkshop", + }, + "qianfan_sk": { + "display_name": "Qianfan Sk", + "password": True, + "info": "which you could get from https://cloud.baidu.com/product/wenxinworkshop", + }, + "top_p": { + "display_name": "Top p", + "field_type": "float", + "info": "Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", + "value": 0.8, + "advanced": True, + }, + "temperature": { + "display_name": "Temperature", + "field_type": "float", + "info": "Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", + "value": 0.95, + }, + "penalty_score": { + "display_name": "Penalty Score", + "field_type": "float", + "info": "Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo", + "value": 1.0, + "advanced": True, + }, + "endpoint": { + "display_name": "Endpoint", + "info": "Endpoint of the Qianfan LLM, required if custom model used.", + }, + "code": {"show": False}, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + 
qianfan_ak: str, + qianfan_sk: str, + model: str, + top_p: Optional[float] = None, + temperature: Optional[float] = None, + penalty_score: Optional[float] = None, + endpoint: Optional[str] = None, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + try: + output = QianfanChatEndpoint( # type: ignore + model=model, + qianfan_ak=SecretStr(qianfan_ak) if qianfan_ak else None, + qianfan_sk=SecretStr(qianfan_sk) if qianfan_sk else None, + top_p=top_p, + temperature=temperature, + penalty_score=penalty_score, + endpoint=endpoint, + ) + except Exception as e: + raise ValueError("Could not connect to Baidu Qianfan API.") from e + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/ChatLiteLLMModel.py b/src/backend/base/langflow/components/models/ChatLiteLLMModel.py new file mode 100644 index 000000000..5266c6935 --- /dev/null +++ b/src/backend/base/langflow/components/models/ChatLiteLLMModel.py @@ -0,0 +1,191 @@ +from typing import Any, Dict, Optional + +from langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import BaseLanguageModel, Text + + +class ChatLiteLLMModelComponent(LCModelComponent): + display_name = "LiteLLM" + description = "`LiteLLM` collection of large language models." + documentation = "https://python.langchain.com/docs/integrations/chat/litellm" + field_order = [ + "model", + "api_key", + "provider", + "temperature", + "model_kwargs", + "top_p", + "top_k", + "n", + "max_tokens", + "max_retries", + "verbose", + "stream", + "input_value", + "system_message", + ] + + def build_config(self): + return { + "model": { + "display_name": "Model name", + "field_type": "str", + "advanced": False, + "required": True, + "info": "The name of the model to use. 
For example, `gpt-3.5-turbo`.", + }, + "api_key": { + "display_name": "API key", + "field_type": "str", + "advanced": False, + "required": False, + "password": True, + }, + "provider": { + "display_name": "Provider", + "info": "The provider of the API key.", + "options": [ + "OpenAI", + "Azure", + "Anthropic", + "Replicate", + "Cohere", + "OpenRouter", + ], + }, + "temperature": { + "display_name": "Temperature", + "field_type": "float", + "advanced": False, + "required": False, + "default": 0.7, + }, + "model_kwargs": { + "display_name": "Model kwargs", + "field_type": "dict", + "advanced": True, + "required": False, + "default": {}, + }, + "top_p": { + "display_name": "Top p", + "field_type": "float", + "advanced": True, + "required": False, + }, + "top_k": { + "display_name": "Top k", + "field_type": "int", + "advanced": True, + "required": False, + }, + "n": { + "display_name": "N", + "field_type": "int", + "advanced": True, + "required": False, + "info": "Number of chat completions to generate for each prompt. 
" + "Note that the API may not return the full n completions if duplicates are generated.", + "default": 1, + }, + "max_tokens": { + "display_name": "Max tokens", + "field_type": "int", + "advanced": False, + "required": False, + "default": 256, + "info": "The maximum number of tokens to generate for each chat completion.", + }, + "max_retries": { + "display_name": "Max retries", + "field_type": "int", + "advanced": True, + "required": False, + "default": 6, + }, + "verbose": { + "display_name": "Verbose", + "field_type": "bool", + "advanced": True, + "required": False, + "default": False, + }, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + model: str, + provider: str, + api_key: Optional[str] = None, + stream: bool = False, + temperature: Optional[float] = 0.7, + model_kwargs: Optional[Dict[str, Any]] = {}, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + n: int = 1, + max_tokens: int = 256, + max_retries: int = 6, + verbose: bool = False, + system_message: Optional[str] = None, + ) -> BaseLanguageModel: + try: + import litellm # type: ignore + + litellm.drop_params = True + litellm.set_verbose = verbose + except ImportError: + raise ChatLiteLLMException( + "Could not import litellm python package. 
" "Please install it with `pip install litellm`" + ) + provider_map = { + "OpenAI": "openai_api_key", + "Azure": "azure_api_key", + "Anthropic": "anthropic_api_key", + "Replicate": "replicate_api_key", + "Cohere": "cohere_api_key", + "OpenRouter": "openrouter_api_key", + } + # Set the API key based on the provider + api_keys: dict[str, Optional[str]] = {v: None for v in provider_map.values()} + + if variable_name := provider_map.get(provider): + api_keys[variable_name] = api_key + else: + raise ChatLiteLLMException( + f"Provider {provider} is not supported. Supported providers are: {', '.join(provider_map.keys())}" + ) + + output = ChatLiteLLM( + model=model, + client=None, + streaming=stream, + temperature=temperature, + model_kwargs=model_kwargs if model_kwargs is not None else {}, + top_p=top_p, + top_k=top_k, + n=n, + max_tokens=max_tokens, + max_retries=max_retries, + openai_api_key=api_keys["openai_api_key"], + azure_api_key=api_keys["azure_api_key"], + anthropic_api_key=api_keys["anthropic_api_key"], + replicate_api_key=api_keys["replicate_api_key"], + cohere_api_key=api_keys["cohere_api_key"], + openrouter_api_key=api_keys["openrouter_api_key"], + ) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/CohereModel.py b/src/backend/base/langflow/components/models/CohereModel.py new file mode 100644 index 000000000..665aacc13 --- /dev/null +++ b/src/backend/base/langflow/components/models/CohereModel.py @@ -0,0 +1,73 @@ +from typing import Optional + +from langchain_community.chat_models.cohere import ChatCohere +from pydantic.v1 import SecretStr +from langflow.field_typing import Text +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent + + +class CohereComponent(LCModelComponent): + display_name = "Cohere" + description = "Generate text using Cohere LLMs." 
+ documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere" + + icon = "Cohere" + + field_order = [ + "cohere_api_key", + "max_tokens", + "temperature", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "cohere_api_key": { + "display_name": "Cohere API Key", + "type": "password", + "password": True, + "required": True, + }, + "max_tokens": { + "display_name": "Max Tokens", + "advanced": True, + "default": 256, + "type": "int", + "show": True, + }, + "temperature": { + "display_name": "Temperature", + "default": 0.75, + "type": "float", + "show": True, + }, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + cohere_api_key: str, + input_value: Text, + temperature: float = 0.75, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + api_key = SecretStr(cohere_api_key) + output = ChatCohere( # type: ignore + cohere_api_key=api_key, + temperature=temperature, + ) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py new file mode 100644 index 000000000..0c2b8eef5 --- /dev/null +++ b/src/backend/base/langflow/components/models/GoogleGenerativeAIModel.py @@ -0,0 +1,102 @@ +from typing import Optional + +from langchain_google_genai import ChatGoogleGenerativeAI +from pydantic.v1 import SecretStr +from langflow.field_typing import Text, RangeSpec +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent + + +class GoogleGenerativeAIComponent(LCModelComponent): + display_name: str = "Google 
Generative AI" + description: str = "Generate text using Google Generative AI." + icon = "GoogleGenerativeAI" + + field_order = [ + "google_api_key", + "model", + "max_output_tokens", + "temperature", + "top_k", + "top_p", + "n", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "google_api_key": { + "display_name": "Google API Key", + "info": "The Google API Key to use for the Google Generative AI.", + }, + "max_output_tokens": { + "display_name": "Max Output Tokens", + "info": "The maximum number of tokens to generate.", + "advanced": True, + }, + "temperature": { + "display_name": "Temperature", + "info": "Run inference with this temperature. Must by in the closed interval [0.0, 1.0].", + }, + "top_k": { + "display_name": "Top K", + "info": "Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", + "rangeSpec": RangeSpec(min=0, max=2, step=0.1), + "advanced": True, + }, + "top_p": { + "display_name": "Top P", + "info": "The maximum cumulative probability of tokens to consider when sampling.", + "advanced": True, + }, + "n": { + "display_name": "N", + "info": "Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.", + "advanced": True, + }, + "model": { + "display_name": "Model", + "info": "The name of the model to use. 
Supported examples: gemini-pro", + "options": ["gemini-pro", "gemini-pro-vision"], + }, + "code": { + "advanced": True, + }, + "input_value": {"display_name": "Input", "info": "The input to the model."}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + google_api_key: str, + model: str, + input_value: Text, + max_output_tokens: Optional[int] = None, + temperature: float = 0.1, + top_k: Optional[int] = None, + top_p: Optional[float] = None, + n: Optional[int] = 1, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + output = ChatGoogleGenerativeAI( + model=model, + max_output_tokens=max_output_tokens or None, # type: ignore + temperature=temperature, + top_k=top_k or None, + top_p=top_p or None, # type: ignore + n=n or 1, + google_api_key=SecretStr(google_api_key), + ) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/HuggingFaceModel.py b/src/backend/base/langflow/components/models/HuggingFaceModel.py new file mode 100644 index 000000000..19750ef9f --- /dev/null +++ b/src/backend/base/langflow/components/models/HuggingFaceModel.py @@ -0,0 +1,74 @@ +from typing import Optional + +from langchain_community.chat_models.huggingface import ChatHuggingFace +from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint +from langflow.field_typing import Text +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent + + +class HuggingFaceEndpointsComponent(LCModelComponent): + display_name: str = "Hugging Face API" + description: str = "Generate text using Hugging Face Inference APIs." 
+ icon = "HuggingFace" + + field_order = [ + "endpoint_url", + "task", + "huggingfacehub_api_token", + "model_kwargs", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "endpoint_url": {"display_name": "Endpoint URL", "password": True}, + "task": { + "display_name": "Task", + "options": ["text2text-generation", "text-generation", "summarization"], + }, + "huggingfacehub_api_token": {"display_name": "API token", "password": True}, + "model_kwargs": { + "display_name": "Model Keyword Arguments", + "field_type": "code", + "advanced": True, + }, + "code": {"show": False}, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + endpoint_url: str, + model: Optional[str] = None, + task: str = "text2text-generation", + huggingfacehub_api_token: Optional[str] = None, + model_kwargs: Optional[dict] = None, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + try: + llm = HuggingFaceEndpoint( # type: ignore + endpoint_url=endpoint_url, + task=task, + huggingfacehub_api_token=huggingfacehub_api_token, + model_kwargs=model_kwargs or {}, + model=model or "", + ) + except Exception as e: + raise ValueError("Could not connect to HuggingFace Endpoints API.") from e + output = ChatHuggingFace(llm=llm) + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/OllamaModel.py b/src/backend/base/langflow/components/models/OllamaModel.py new file mode 100644 index 000000000..41d75ba3e --- /dev/null +++ b/src/backend/base/langflow/components/models/OllamaModel.py @@ -0,0 +1,300 @@ +from typing import Any, Dict, List, Optional + +# from langchain_community.chat_models import ChatOllama 
+from langchain_community.chat_models import ChatOllama + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent + +# from langchain.chat_models import ChatOllama +from langflow.field_typing import Text + +# When a callback component is added to Langflow, the comment must be uncommented. +# from langchain.callbacks.manager import CallbackManager + + +class ChatOllamaComponent(LCModelComponent): + display_name = "Ollama" + description = "Generate text using Ollama Local LLMs." + icon = "Ollama" + + field_order = [ + "base_url", + "model", + "temperature", + "cache", + "callback_manager", + "callbacks", + "format", + "metadata", + "mirostat", + "mirostat_eta", + "mirostat_tau", + "num_ctx", + "num_gpu", + "num_thread", + "repeat_last_n", + "repeat_penalty", + "tfs_z", + "timeout", + "top_k", + "top_p", + "verbose", + "tags", + "stop", + "system", + "template", + "input_value", + "system_message", + "stream", + ] + + def build_config(self) -> dict: + return { + "base_url": { + "display_name": "Base URL", + "info": "Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.", + "advanced": True, + }, + "model": { + "display_name": "Model Name", + "value": "llama2", + "info": "Refer to https://ollama.ai/library for more models.", + }, + "temperature": { + "display_name": "Temperature", + "field_type": "float", + "value": 0.8, + "info": "Controls the creativity of model responses.", + }, + "cache": { + "display_name": "Cache", + "field_type": "bool", + "info": "Enable or disable caching.", + "advanced": True, + "value": False, + }, + ### When a callback component is added to Langflow, the comment must be uncommented.
### + # "callback_manager": { + # "display_name": "Callback Manager", + # "info": "Optional callback manager for additional functionality.", + # "advanced": True, + # }, + # "callbacks": { + # "display_name": "Callbacks", + # "info": "Callbacks to execute during model runtime.", + # "advanced": True, + # }, + ######################################################################################## + "format": { + "display_name": "Format", + "field_type": "str", + "info": "Specify the format of the output (e.g., json).", + "advanced": True, + }, + "metadata": { + "display_name": "Metadata", + "info": "Metadata to add to the run trace.", + "advanced": True, + }, + "mirostat": { + "display_name": "Mirostat", + "options": ["Disabled", "Mirostat", "Mirostat 2.0"], + "info": "Enable/disable Mirostat sampling for controlling perplexity.", + "value": "Disabled", + "advanced": True, + }, + "mirostat_eta": { + "display_name": "Mirostat Eta", + "field_type": "float", + "info": "Learning rate for Mirostat algorithm. (Default: 0.1)", + "advanced": True, + }, + "mirostat_tau": { + "display_name": "Mirostat Tau", + "field_type": "float", + "info": "Controls the balance between coherence and diversity of the output. (Default: 5.0)", + "advanced": True, + }, + "num_ctx": { + "display_name": "Context Window Size", + "field_type": "int", + "info": "Size of the context window for generating tokens. (Default: 2048)", + "advanced": True, + }, + "num_gpu": { + "display_name": "Number of GPUs", + "field_type": "int", + "info": "Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)", + "advanced": True, + }, + "num_thread": { + "display_name": "Number of Threads", + "field_type": "int", + "info": "Number of threads to use during computation. (Default: detected for optimal performance)", + "advanced": True, + }, + "repeat_last_n": { + "display_name": "Repeat Last N", + "field_type": "int", + "info": "How far back the model looks to prevent repetition. 
(Default: 64, 0 = disabled, -1 = num_ctx)", + "advanced": True, + }, + "repeat_penalty": { + "display_name": "Repeat Penalty", + "field_type": "float", + "info": "Penalty for repetitions in generated text. (Default: 1.1)", + "advanced": True, + }, + "tfs_z": { + "display_name": "TFS Z", + "field_type": "float", + "info": "Tail free sampling value. (Default: 1)", + "advanced": True, + }, + "timeout": { + "display_name": "Timeout", + "field_type": "int", + "info": "Timeout for the request stream.", + "advanced": True, + }, + "top_k": { + "display_name": "Top K", + "field_type": "int", + "info": "Limits token selection to top K. (Default: 40)", + "advanced": True, + }, + "top_p": { + "display_name": "Top P", + "field_type": "float", + "info": "Works together with top-k. (Default: 0.9)", + "advanced": True, + }, + "verbose": { + "display_name": "Verbose", + "field_type": "bool", + "info": "Whether to print out response text.", + }, + "tags": { + "display_name": "Tags", + "field_type": "list", + "info": "Tags to add to the run trace.", + "advanced": True, + }, + "stop": { + "display_name": "Stop Tokens", + "field_type": "list", + "info": "List of tokens to signal the model to stop generating text.", + "advanced": True, + }, + "system": { + "display_name": "System", + "field_type": "str", + "info": "System to use for generating text.", + "advanced": True, + }, + "template": { + "display_name": "Template", + "field_type": "str", + "info": "Template to use for generating text.", + "advanced": True, + }, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + base_url: Optional[str], + model: str, + input_value: Text, + mirostat: Optional[str], + mirostat_eta: Optional[float] = None, + mirostat_tau: Optional[float] = None, + ### When a callback 
component is added to Langflow, the comment must be uncommented.### + # callback_manager: Optional[CallbackManager] = None, + # callbacks: Optional[List[Callbacks]] = None, + ####################################################################################### + repeat_last_n: Optional[int] = None, + verbose: Optional[bool] = None, + cache: Optional[bool] = None, + num_ctx: Optional[int] = None, + num_gpu: Optional[int] = None, + format: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None, + num_thread: Optional[int] = None, + repeat_penalty: Optional[float] = None, + stop: Optional[List[str]] = None, + system: Optional[str] = None, + tags: Optional[List[str]] = None, + temperature: Optional[float] = None, + template: Optional[str] = None, + tfs_z: Optional[float] = None, + timeout: Optional[int] = None, + top_k: Optional[int] = None, + top_p: Optional[int] = None, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + if not base_url: + base_url = "http://localhost:11434" + + # Mapping mirostat settings to their corresponding values + mirostat_options = {"Mirostat": 1, "Mirostat 2.0": 2} + + # Default to 0 for 'Disabled' + mirostat_value = mirostat_options.get(mirostat, 0) # type: ignore + + # Set mirostat_eta and mirostat_tau to None if mirostat is disabled + if mirostat_value == 0: + mirostat_eta = None + mirostat_tau = None + + # Mapping system settings to their corresponding values + llm_params = { + "base_url": base_url, + "cache": cache, + "model": model, + "mirostat": mirostat_value, + "format": format, + "metadata": metadata, + "tags": tags, + ## When a callback component is added to Langflow, the comment must be uncommented.## + # "callback_manager": callback_manager, + # "callbacks": callbacks, + ##################################################################################### + "mirostat_eta": mirostat_eta, + "mirostat_tau": mirostat_tau, + "num_ctx": num_ctx, + "num_gpu": num_gpu, + "num_thread": num_thread, 
+ "repeat_last_n": repeat_last_n, + "repeat_penalty": repeat_penalty, + "temperature": temperature, + "stop": stop, + "system": system, + "template": template, + "tfs_z": tfs_z, + "timeout": timeout, + "top_k": top_k, + "top_p": top_p, + "verbose": verbose, + } + + # None Value remove + llm_params = {k: v for k, v in llm_params.items() if v is not None} + + try: + output = ChatOllama(**llm_params) # type: ignore + except Exception as e: + raise ValueError("Could not initialize Ollama LLM.") from e + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/OpenAIModel.py b/src/backend/base/langflow/components/models/OpenAIModel.py new file mode 100644 index 000000000..678151064 --- /dev/null +++ b/src/backend/base/langflow/components/models/OpenAIModel.py @@ -0,0 +1,106 @@ +from typing import Optional + +from langchain_openai import ChatOpenAI + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import NestedDict, Text + + +class OpenAIModelComponent(LCModelComponent): + display_name = "OpenAI" + description = "Generates text using OpenAI LLMs." 
+ icon = "OpenAI" + + field_order = [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "input_value": {"display_name": "Input"}, + "max_tokens": { + "display_name": "Max Tokens", + "advanced": True, + }, + "model_kwargs": { + "display_name": "Model Kwargs", + "advanced": True, + }, + "model_name": { + "display_name": "Model Name", + "advanced": False, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106", + ], + "value": "gpt-4-turbo-preview", + }, + "openai_api_base": { + "display_name": "OpenAI API Base", + "advanced": True, + "info": ( + "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\n" + "You can change this to use other APIs like JinaChat, LocalAI and Prem." + ), + }, + "openai_api_key": { + "display_name": "OpenAI API Key", + "info": "The OpenAI API Key to use for the OpenAI model.", + "advanced": False, + "password": True, + }, + "temperature": { + "display_name": "Temperature", + "advanced": False, + "value": 0.1, + }, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + "advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + openai_api_key: str, + temperature: float, + model_name: str, + max_tokens: Optional[int] = 256, + model_kwargs: NestedDict = {}, + openai_api_base: Optional[str] = None, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + if not openai_api_base: + openai_api_base = "https://api.openai.com/v1" + output = ChatOpenAI( + max_tokens=max_tokens, + model_kwargs=model_kwargs, + model=model_name, + base_url=openai_api_base, + api_key=openai_api_key, + 
temperature=temperature, + ) + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/VertexAiModel.py b/src/backend/base/langflow/components/models/VertexAiModel.py new file mode 100644 index 000000000..ff520e0d1 --- /dev/null +++ b/src/backend/base/langflow/components/models/VertexAiModel.py @@ -0,0 +1,126 @@ +from typing import List, Optional + +from langchain_core.messages.base import BaseMessage + +from langflow.base.constants import STREAM_INFO_TEXT +from langflow.base.models.model import LCModelComponent +from langflow.field_typing import Text + + +class ChatVertexAIComponent(LCModelComponent): + display_name = "Vertex AI" + description = "Generate text using Vertex AI LLMs." + icon = "VertexAI" + + field_order = [ + "credentials", + "project", + "examples", + "location", + "max_output_tokens", + "model_name", + "temperature", + "top_k", + "top_p", + "verbose", + "input_value", + "system_message", + "stream", + ] + + def build_config(self): + return { + "credentials": { + "display_name": "Credentials", + "field_type": "file", + "file_types": [".json"], + "file_path": None, + }, + "examples": { + "display_name": "Examples", + "multiline": True, + }, + "location": { + "display_name": "Location", + "value": "us-central1", + }, + "max_output_tokens": { + "display_name": "Max Output Tokens", + "value": 128, + "advanced": True, + }, + "model_name": { + "display_name": "Model Name", + "value": "chat-bison", + }, + "project": { + "display_name": "Project", + }, + "temperature": { + "display_name": "Temperature", + "value": 0.0, + }, + "top_k": { + "display_name": "Top K", + "value": 40, + "advanced": True, + }, + "top_p": { + "display_name": "Top P", + "value": 0.95, + "advanced": True, + }, + "verbose": { + "display_name": "Verbose", + "value": False, + "advanced": True, + }, + "input_value": {"display_name": "Input"}, + "stream": { + "display_name": "Stream", + "info": STREAM_INFO_TEXT, + 
"advanced": True, + }, + "system_message": { + "display_name": "System Message", + "info": "System message to pass to the model.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + credentials: Optional[str], + project: str, + examples: Optional[List[BaseMessage]] = [], + location: str = "us-central1", + max_output_tokens: int = 128, + model_name: str = "chat-bison", + temperature: float = 0.0, + top_k: int = 40, + top_p: float = 0.95, + verbose: bool = False, + stream: bool = False, + system_message: Optional[str] = None, + ) -> Text: + try: + from langchain_google_vertexai import ChatVertexAI # type: ignore + except ImportError: + raise ImportError( + "To use the ChatVertexAI model, you need to install the langchain-google-vertexai package." + ) + output = ChatVertexAI( + credentials=credentials, + examples=examples, + location=location, + max_output_tokens=max_output_tokens, + model_name=model_name, + project=project, + temperature=temperature, + top_k=top_k, + top_p=top_p, + verbose=verbose, + ) + + return self.get_chat_result(output, stream, input_value, system_message) diff --git a/src/backend/base/langflow/components/models/__init__.py b/src/backend/base/langflow/components/models/__init__.py new file mode 100644 index 000000000..9db6caa26 --- /dev/null +++ b/src/backend/base/langflow/components/models/__init__.py @@ -0,0 +1,26 @@ +from .AmazonBedrockModel import AmazonBedrockComponent +from .AnthropicModel import AnthropicLLM +from .AzureOpenAIModel import AzureChatOpenAIComponent +from .BaiduQianfanChatModel import QianfanChatEndpointComponent +from .ChatLiteLLMModel import ChatLiteLLMModelComponent +from .CohereModel import CohereComponent +from .GoogleGenerativeAIModel import GoogleGenerativeAIComponent +from .HuggingFaceModel import HuggingFaceEndpointsComponent +from .OllamaModel import ChatOllamaComponent +from .OpenAIModel import OpenAIModelComponent +from .VertexAiModel import ChatVertexAIComponent + +__all__ = [ + 
"ChatLiteLLMModelComponent", + "AmazonBedrockComponent", + "AnthropicLLM", + "AzureChatOpenAIComponent", + "QianfanChatEndpointComponent", + "CohereComponent", + "GoogleGenerativeAIComponent", + "HuggingFaceEndpointsComponent", + "ChatOllamaComponent", + "OpenAIModelComponent", + "ChatVertexAIComponent", + "base", +] diff --git a/src/backend/base/langflow/components/outputs/ChatOutput.py b/src/backend/base/langflow/components/outputs/ChatOutput.py new file mode 100644 index 000000000..9ca162c4c --- /dev/null +++ b/src/backend/base/langflow/components/outputs/ChatOutput.py @@ -0,0 +1,29 @@ +from typing import Optional, Union + +from langflow.base.io.chat import ChatComponent +from langflow.field_typing import Text +from langflow.schema import Record + + +class ChatOutput(ChatComponent): + display_name = "Chat Output" + description = "Display a chat message in the Interaction Panel." + icon = "ChatOutput" + + def build( + self, + sender: Optional[str] = "Machine", + sender_name: Optional[str] = "AI", + input_value: Optional[str] = None, + session_id: Optional[str] = None, + return_record: Optional[bool] = False, + record_template: Optional[str] = "{text}", + ) -> Union[Text, Record]: + return super().build( + sender=sender, + sender_name=sender_name, + input_value=input_value, + session_id=session_id, + return_record=return_record, + record_template=record_template, + ) diff --git a/src/backend/base/langflow/components/outputs/TextOutput.py b/src/backend/base/langflow/components/outputs/TextOutput.py new file mode 100644 index 000000000..fe2d8d145 --- /dev/null +++ b/src/backend/base/langflow/components/outputs/TextOutput.py @@ -0,0 +1,28 @@ +from typing import Optional + +from langflow.base.io.text import TextComponent +from langflow.field_typing import Text + + +class TextOutput(TextComponent): + display_name = "Text Output" + description = "Display a text output in the Interaction Panel." 
+ icon = "type" + + def build_config(self): + return { + "input_value": { + "display_name": "Value", + "input_types": ["Record", "Text"], + "info": "Text or Record to be passed as output.", + }, + "record_template": { + "display_name": "Record Template", + "multiline": True, + "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", + "advanced": True, + }, + } + + def build(self, input_value: Optional[Text] = "", record_template: str = "") -> Text: + return super().build(input_value=input_value, record_template=record_template) diff --git a/src/backend/base/langflow/components/outputs/__init__.py b/src/backend/base/langflow/components/outputs/__init__.py new file mode 100644 index 000000000..cf395bb86 --- /dev/null +++ b/src/backend/base/langflow/components/outputs/__init__.py @@ -0,0 +1,4 @@ +from .ChatOutput import ChatOutput +from .TextOutput import TextOutput + +__all__ = ["ChatOutput", "TextOutput"] diff --git a/src/backend/langflow/components/retrievers/AmazonKendra.py b/src/backend/base/langflow/components/retrievers/AmazonKendra.py similarity index 91% rename from src/backend/langflow/components/retrievers/AmazonKendra.py rename to src/backend/base/langflow/components/retrievers/AmazonKendra.py index 827945a51..6584f6545 100644 --- a/src/backend/langflow/components/retrievers/AmazonKendra.py +++ b/src/backend/base/langflow/components/retrievers/AmazonKendra.py @@ -1,12 +1,15 @@ from typing import Optional -from langflow import CustomComponent -from langchain.retrievers import AmazonKendraRetriever + from langchain.schema import BaseRetriever +from langchain_community.retrievers import AmazonKendraRetriever + +from langflow.interface.custom.custom_component import CustomComponent class AmazonKendraRetrieverComponent(CustomComponent): display_name: str = "Amazon Kendra Retriever" description: str = "Retriever that uses the Amazon Kendra API." 
+ icon = "Amazon" def build_config(self): return { diff --git a/src/backend/langflow/components/retrievers/MetalRetriever.py b/src/backend/base/langflow/components/retrievers/MetalRetriever.py similarity index 88% rename from src/backend/langflow/components/retrievers/MetalRetriever.py rename to src/backend/base/langflow/components/retrievers/MetalRetriever.py index 88393f26d..c5c56a397 100644 --- a/src/backend/langflow/components/retrievers/MetalRetriever.py +++ b/src/backend/base/langflow/components/retrievers/MetalRetriever.py @@ -1,9 +1,11 @@ from typing import Optional -from langflow import CustomComponent -from langchain.retrievers import MetalRetriever + from langchain.schema import BaseRetriever +from langchain_community.retrievers import MetalRetriever from metal_sdk.metal import Metal # type: ignore +from langflow.interface.custom.custom_component import CustomComponent + class MetalRetrieverComponent(CustomComponent): display_name: str = "Metal Retriever" diff --git a/src/backend/langflow/components/retrievers/MultiQueryRetriever.py b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py similarity index 96% rename from src/backend/langflow/components/retrievers/MultiQueryRetriever.py rename to src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py index d2c12d7a5..0dec1c028 100644 --- a/src/backend/langflow/components/retrievers/MultiQueryRetriever.py +++ b/src/backend/base/langflow/components/retrievers/MultiQueryRetriever.py @@ -1,8 +1,9 @@ from typing import Callable, Optional, Union from langchain.retrievers import MultiQueryRetriever -from langflow import CustomComponent + from langflow.field_typing import BaseLLM, BaseRetriever, PromptTemplate +from langflow.interface.custom.custom_component import CustomComponent class MultiQueryRetrieverComponent(CustomComponent): diff --git a/src/backend/langflow/components/retrievers/VectaraSelfQueryRetriver.py 
b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py similarity index 96% rename from src/backend/langflow/components/retrievers/VectaraSelfQueryRetriver.py rename to src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py index 26afd765c..759021487 100644 --- a/src/backend/langflow/components/retrievers/VectaraSelfQueryRetriver.py +++ b/src/backend/base/langflow/components/retrievers/VectaraSelfQueryRetriver.py @@ -1,11 +1,13 @@ -from typing import List -from langflow import CustomComponent import json +from typing import List + +from langchain.base_language import BaseLanguageModel +from langchain.chains.query_constructor.base import AttributeInfo +from langchain.retrievers.self_query.base import SelfQueryRetriever from langchain.schema import BaseRetriever from langchain.schema.vectorstore import VectorStore -from langchain.base_language import BaseLanguageModel -from langchain.retrievers.self_query.base import SelfQueryRetriever -from langchain.chains.query_constructor.base import AttributeInfo + +from langflow.interface.custom.custom_component import CustomComponent class VectaraSelfQueryRetriverComponent(CustomComponent): @@ -16,7 +18,7 @@ class VectaraSelfQueryRetriverComponent(CustomComponent): display_name: str = "Vectara Self Query Retriever for Vectara Vector Store" description: str = "Implementation of Vectara Self Query Retriever" documentation = "https://python.langchain.com/docs/integrations/retrievers/self_query/vectara_self_query" - beta = True + icon = "Vectara" field_config = { "code": {"show": True}, diff --git a/src/backend/langflow/components/retrievers/VectorStoreRetriever.py b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py similarity index 87% rename from src/backend/langflow/components/retrievers/VectorStoreRetriever.py rename to src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py index 576b02e69..43f1aab71 100644 --- 
a/src/backend/langflow/components/retrievers/VectorStoreRetriever.py +++ b/src/backend/base/langflow/components/retrievers/VectorStoreRetriever.py @@ -1,7 +1,7 @@ from langchain_core.vectorstores import VectorStoreRetriever -from langflow import CustomComponent from langflow.field_typing import VectorStore +from langflow.interface.custom.custom_component import CustomComponent class VectoStoreRetrieverComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/retrievers/__init__.py b/src/backend/base/langflow/components/retrievers/__init__.py new file mode 100644 index 000000000..35030bc5e --- /dev/null +++ b/src/backend/base/langflow/components/retrievers/__init__.py @@ -0,0 +1,13 @@ +from .AmazonKendra import AmazonKendraRetrieverComponent +from .MetalRetriever import MetalRetrieverComponent +from .MultiQueryRetriever import MultiQueryRetrieverComponent +from .VectaraSelfQueryRetriver import VectaraSelfQueryRetriverComponent +from .VectorStoreRetriever import VectoStoreRetrieverComponent + +__all__ = [ + "AmazonKendraRetrieverComponent", + "MetalRetrieverComponent", + "MultiQueryRetrieverComponent", + "VectaraSelfQueryRetriverComponent", + "VectoStoreRetrieverComponent", +] diff --git a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py similarity index 54% rename from src/backend/langflow/components/textsplitters/CharacterTextSplitter.py rename to src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py index 1bfd2d2e3..2a0f3686f 100644 --- a/src/backend/langflow/components/textsplitters/CharacterTextSplitter.py +++ b/src/backend/base/langflow/components/textsplitters/CharacterTextSplitter.py @@ -1,8 +1,10 @@ from typing import List from langchain.text_splitter import CharacterTextSplitter -from langchain_core.documents.base import Document -from langflow import CustomComponent + +from langflow.interface.custom.custom_component 
import CustomComponent +from langflow.schema.schema import Record +from langflow.utils.util import unescape_string class CharacterTextSplitterComponent(CustomComponent): @@ -11,7 +13,7 @@ class CharacterTextSplitterComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "chunk_overlap": {"display_name": "Chunk Overlap", "default": 200}, "chunk_size": {"display_name": "Chunk Size", "default": 1000}, "separator": {"display_name": "Separator", "default": "\n"}, @@ -19,15 +21,24 @@ class CharacterTextSplitterComponent(CustomComponent): def build( self, - documents: List[Document], + inputs: List[Record], chunk_overlap: int = 200, chunk_size: int = 1000, separator: str = "\n", - ) -> List[Document]: + ) -> List[Record]: + # separator may come escaped from the frontend + separator = unescape_string(separator) + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) docs = CharacterTextSplitter( chunk_overlap=chunk_overlap, chunk_size=chunk_size, separator=separator, ).split_documents(documents) - self.status = docs - return docs + records = self.to_records(docs) + self.status = records + return records diff --git a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py similarity index 81% rename from src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py rename to src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py index d1494f4d0..1a4ae24a1 100644 --- a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py +++ b/src/backend/base/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py @@ -1,9 +1,9 @@ -from typing import Optional +from typing import 
List, Optional from langchain.text_splitter import Language -from langchain_core.documents import Document -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record class LanguageRecursiveTextSplitterComponent(CustomComponent): @@ -14,10 +14,7 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): def build_config(self): options = [x.value for x in Language] return { - "documents": { - "display_name": "Documents", - "info": "The documents to split.", - }, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "separator_type": { "display_name": "Separator Type", "info": "The type of separator to use.", @@ -47,11 +44,11 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): def build( self, - documents: list[Document], + inputs: List[Record], chunk_size: Optional[int] = 1000, chunk_overlap: Optional[int] = 200, separator_type: str = "Python", - ) -> list[Document]: + ) -> list[Record]: """ Split text into chunks of a specified length. 
@@ -77,6 +74,12 @@ class LanguageRecursiveTextSplitterComponent(CustomComponent): chunk_size=chunk_size, chunk_overlap=chunk_overlap, ) - + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) docs = splitter.split_documents(documents) - return docs + records = self.to_records(docs) + return records diff --git a/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py similarity index 75% rename from src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py rename to src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py index d07ae3ebe..1ceaa8bd6 100644 --- a/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py +++ b/src/backend/base/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py @@ -1,10 +1,11 @@ from typing import Optional +from langchain.text_splitter import RecursiveCharacterTextSplitter from langchain_core.documents import Document -from langflow import CustomComponent -from langflow.utils.util import build_loader_repr_from_documents -from langchain.text_splitter import RecursiveCharacterTextSplitter +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record +from langflow.utils.util import build_loader_repr_from_records, unescape_string class RecursiveCharacterTextSplitterComponent(CustomComponent): @@ -14,9 +15,10 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent): def build_config(self): return { - "documents": { - "display_name": "Documents", - "info": "The documents to split.", + "inputs": { + "display_name": "Input", + "info": "The texts to split.", + "input_types": ["Document", "Record"], }, "separators": { "display_name": "Separators", @@ -40,11 +42,11 @@ class 
RecursiveCharacterTextSplitterComponent(CustomComponent): def build( self, - documents: list[Document], + inputs: list[Document], separators: Optional[list[str]] = None, chunk_size: Optional[int] = 1000, chunk_overlap: Optional[int] = 200, - ) -> list[Document]: + ) -> list[Record]: """ Split text into chunks of a specified length. @@ -63,7 +65,7 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent): elif separators: # check if the separators list has escaped characters # if there are escaped characters, unescape them - separators = [x.encode().decode("unicode-escape") for x in separators] + separators = [unescape_string(x) for x in separators] # Make sure chunk_size and chunk_overlap are ints if isinstance(chunk_size, str): @@ -75,7 +77,13 @@ class RecursiveCharacterTextSplitterComponent(CustomComponent): chunk_size=chunk_size, chunk_overlap=chunk_overlap, ) - + documents = [] + for _input in inputs: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) docs = splitter.split_documents(documents) - self.repr_value = build_loader_repr_from_documents(docs) - return docs + records = self.to_records(docs) + self.repr_value = build_loader_repr_from_records(records) + return records diff --git a/src/backend/base/langflow/components/textsplitters/__init__.py b/src/backend/base/langflow/components/textsplitters/__init__.py new file mode 100644 index 000000000..eb2d6af13 --- /dev/null +++ b/src/backend/base/langflow/components/textsplitters/__init__.py @@ -0,0 +1,9 @@ +from .CharacterTextSplitter import CharacterTextSplitterComponent +from .LanguageRecursiveTextSplitter import LanguageRecursiveTextSplitterComponent +from .RecursiveCharacterTextSplitter import RecursiveCharacterTextSplitterComponent + +__all__ = [ + "CharacterTextSplitterComponent", + "LanguageRecursiveTextSplitterComponent", + "RecursiveCharacterTextSplitterComponent", +] diff --git 
a/src/backend/langflow/components/toolkits/JsonToolkit.py b/src/backend/base/langflow/components/toolkits/JsonToolkit.py similarity index 87% rename from src/backend/langflow/components/toolkits/JsonToolkit.py rename to src/backend/base/langflow/components/toolkits/JsonToolkit.py index ec9e49621..72fe17cde 100644 --- a/src/backend/langflow/components/toolkits/JsonToolkit.py +++ b/src/backend/base/langflow/components/toolkits/JsonToolkit.py @@ -1,6 +1,7 @@ -from langflow import CustomComponent -from langchain_community.tools.json.tool import JsonSpec from langchain_community.agent_toolkits.json.toolkit import JsonToolkit +from langchain_community.tools.json.tool import JsonSpec + +from langflow.interface.custom.custom_component import CustomComponent class JsonToolkitComponent(CustomComponent): diff --git a/src/backend/langflow/components/toolkits/Metaphor.py b/src/backend/base/langflow/components/toolkits/Metaphor.py similarity index 96% rename from src/backend/langflow/components/toolkits/Metaphor.py rename to src/backend/base/langflow/components/toolkits/Metaphor.py index 0f9f23334..14962924f 100644 --- a/src/backend/langflow/components/toolkits/Metaphor.py +++ b/src/backend/base/langflow/components/toolkits/Metaphor.py @@ -5,7 +5,7 @@ from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.tools import Tool from metaphor_python import Metaphor # type: ignore -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent class MetaphorToolkit(CustomComponent): diff --git a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py b/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py similarity index 91% rename from src/backend/langflow/components/toolkits/OpenAPIToolkit.py rename to src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py index fc8780454..b29feb291 100644 --- a/src/backend/langflow/components/toolkits/OpenAPIToolkit.py +++ 
b/src/backend/base/langflow/components/toolkits/OpenAPIToolkit.py @@ -1,7 +1,8 @@ from langchain_community.agent_toolkits.openapi.toolkit import BaseToolkit, OpenAPIToolkit from langchain_community.utilities.requests import TextRequestsWrapper -from langflow import CustomComponent + from langflow.field_typing import AgentExecutor +from langflow.interface.custom.custom_component import CustomComponent class OpenAPIToolkitComponent(CustomComponent): diff --git a/src/backend/langflow/components/toolkits/VectorStoreInfo.py b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py similarity index 91% rename from src/backend/langflow/components/toolkits/VectorStoreInfo.py rename to src/backend/base/langflow/components/toolkits/VectorStoreInfo.py index ec2323bfd..1775d816a 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreInfo.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreInfo.py @@ -3,7 +3,7 @@ from typing import Callable, Union from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo from langchain_community.vectorstores import VectorStore -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent class VectorStoreInfoComponent(CustomComponent): diff --git a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py b/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py similarity index 84% rename from src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py rename to src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py index ed8797044..13ec1656f 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreRouterToolkit.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreRouterToolkit.py @@ -1,8 +1,9 @@ -from langflow import CustomComponent from typing import List, Union -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit -from 
langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo + +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo, VectorStoreRouterToolkit + from langflow.field_typing import BaseLanguageModel, Tool +from langflow.interface.custom.custom_component import CustomComponent class VectorStoreRouterToolkitComponent(CustomComponent): diff --git a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py b/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py similarity index 72% rename from src/backend/langflow/components/toolkits/VectorStoreToolkit.py rename to src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py index 38b9c9171..8436ba58e 100644 --- a/src/backend/langflow/components/toolkits/VectorStoreToolkit.py +++ b/src/backend/base/langflow/components/toolkits/VectorStoreToolkit.py @@ -1,14 +1,10 @@ -from langflow import CustomComponent -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit -from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo -from langflow.field_typing import ( - BaseLanguageModel, -) -from langflow.field_typing import ( - Tool, -) from typing import Union +from langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo, VectorStoreToolkit + +from langflow.field_typing import BaseLanguageModel, Tool +from langflow.interface.custom.custom_component import CustomComponent + class VectorStoreToolkitComponent(CustomComponent): display_name = "VectorStoreToolkit" diff --git a/src/backend/base/langflow/components/toolkits/__init__.py b/src/backend/base/langflow/components/toolkits/__init__.py new file mode 100644 index 000000000..189b43159 --- /dev/null +++ b/src/backend/base/langflow/components/toolkits/__init__.py @@ -0,0 +1,15 @@ +from .JsonToolkit import JsonToolkitComponent +from .Metaphor import MetaphorToolkit +from .OpenAPIToolkit import OpenAPIToolkitComponent +from .VectorStoreInfo import 
VectorStoreInfoComponent +from .VectorStoreRouterToolkit import VectorStoreRouterToolkitComponent +from .VectorStoreToolkit import VectorStoreToolkitComponent + +__all__ = [ + "JsonToolkitComponent", + "MetaphorToolkit", + "OpenAPIToolkitComponent", + "VectorStoreInfoComponent", + "VectorStoreRouterToolkitComponent", + "VectorStoreToolkitComponent", +] diff --git a/src/backend/langflow/components/tools/RetrieverTool.py b/src/backend/base/langflow/components/tools/RetrieverTool.py similarity index 92% rename from src/backend/langflow/components/tools/RetrieverTool.py rename to src/backend/base/langflow/components/tools/RetrieverTool.py index 5a598dbe2..914ba3941 100644 --- a/src/backend/langflow/components/tools/RetrieverTool.py +++ b/src/backend/base/langflow/components/tools/RetrieverTool.py @@ -1,7 +1,7 @@ from langchain.tools.retriever import create_retriever_tool -from langflow import CustomComponent from langflow.field_typing import BaseRetriever, Tool +from langflow.interface.custom.custom_component import CustomComponent class RetrieverToolComponent(CustomComponent): diff --git a/src/backend/base/langflow/components/tools/SearchAPITool.py b/src/backend/base/langflow/components/tools/SearchAPITool.py new file mode 100644 index 000000000..e0658c8c8 --- /dev/null +++ b/src/backend/base/langflow/components/tools/SearchAPITool.py @@ -0,0 +1,37 @@ +from langchain_community.tools.searchapi import SearchAPIRun +from langchain_community.utilities.searchapi import SearchApiAPIWrapper + +from langflow.custom import CustomComponent +from langflow.field_typing import Tool + + +class SearchApiToolComponent(CustomComponent): + display_name: str = "SearchApi Tool" + description: str = "Real-time search engine results API." 
+ documentation: str = "https://www.searchapi.io/docs/google" + field_config = { + "engine": { + "display_name": "Engine", + "field_type": "str", + "info": "The search engine to use.", + }, + "api_key": { + "display_name": "API Key", + "field_type": "str", + "required": True, + "password": True, + "info": "The API key to use SearchApi.", + }, + } + + def build( + self, + engine: str, + api_key: str, + ) -> Tool: + search_api_wrapper = SearchApiAPIWrapper(engine=engine, searchapi_api_key=api_key) + + tool = SearchAPIRun(api_wrapper=search_api_wrapper) + + self.status = tool + return tool # type: ignore diff --git a/src/backend/base/langflow/components/tools/SearchApi.py b/src/backend/base/langflow/components/tools/SearchApi.py new file mode 100644 index 000000000..3dcd48d9f --- /dev/null +++ b/src/backend/base/langflow/components/tools/SearchApi.py @@ -0,0 +1,53 @@ +from typing import Optional + +from langchain_community.utilities.searchapi import SearchApiAPIWrapper + +from langflow.custom import CustomComponent +from langflow.schema.schema import Record +from langflow.services.database.models.base import orjson_dumps + + +class SearchApi(CustomComponent): + display_name: str = "SearchApi" + description: str = "Real-time search engine results API." 
+ output_types: list[str] = ["Document"] + documentation: str = "https://www.searchapi.io/docs/google" + field_config = { + "engine": { + "display_name": "Engine", + "field_type": "str", + "info": "The search engine to use.", + }, + "params": { + "display_name": "Parameters", + "info": "The parameters to send with the request.", + }, + "code": {"show": False}, + "api_key": { + "display_name": "API Key", + "field_type": "str", + "required": True, + "password": True, + "info": "The API key to use SearchApi.", + }, + } + + def build( + self, + engine: str, + api_key: str, + params: Optional[dict] = None, + ) -> Record: + if params is None: + params = {} + + search_api_wrapper = SearchApiAPIWrapper(engine=engine, searchapi_api_key=api_key) + + q = params.pop("q", "SearchApi Langflow") + results = search_api_wrapper.results(q, **params) + + result = orjson_dumps(results, indent_2=False) + + record = Record(data=result) + self.status = record + return record diff --git a/src/backend/base/langflow/components/tools/__init__.py b/src/backend/base/langflow/components/tools/__init__.py new file mode 100644 index 000000000..27072c109 --- /dev/null +++ b/src/backend/base/langflow/components/tools/__init__.py @@ -0,0 +1,5 @@ +from .RetrieverTool import RetrieverToolComponent +from .SearchAPITool import SearchApiToolComponent +from .SearchApi import SearchApi + +__all__ = ["RetrieverToolComponent", "SearchApiToolComponent", "SearchApi"] diff --git a/src/backend/base/langflow/components/vectorsearch/AstraDBSearch.py b/src/backend/base/langflow/components/vectorsearch/AstraDBSearch.py new file mode 100644 index 000000000..83ed42daf --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/AstraDBSearch.py @@ -0,0 +1,148 @@ +from typing import List, Optional + +from langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.field_typing import Embeddings, Text 
+from langflow.schema import Record + + +class AstraDBSearchComponent(LCVectorStoreComponent): + display_name = "Astra DB Search" + description = "Searches an existing Astra DB Vector Store." + icon = "AstraDB" + field_order = ["token", "api_endpoint", "collection_name", "input_value", "embedding"] + + def build_config(self): + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": { + "display_name": "Input Value", + "info": "Input value to search", + }, + "embedding": {"display_name": "Embedding", "info": "Embedding to use"}, + "collection_name": { + "display_name": "Collection Name", + "info": "The name of the collection within Astra DB where the vectors will be stored.", + }, + "token": { + "display_name": "Token", + "info": "Authentication token for accessing Astra DB.", + "password": True, + }, + "api_endpoint": { + "display_name": "API Endpoint", + "info": "API endpoint URL for the Astra DB service.", + }, + "namespace": { + "display_name": "Namespace", + "info": "Optional namespace within Astra DB to use for the collection.", + "advanced": True, + }, + "metric": { + "display_name": "Metric", + "info": "Optional distance metric for vector comparisons in the vector store.", + "advanced": True, + }, + "batch_size": { + "display_name": "Batch Size", + "info": "Optional number of records to process in a single batch.", + "advanced": True, + }, + "bulk_insert_batch_concurrency": { + "display_name": "Bulk Insert Batch Concurrency", + "info": "Optional concurrency level for bulk insert operations.", + "advanced": True, + }, + "bulk_insert_overwrite_concurrency": { + "display_name": "Bulk Insert Overwrite Concurrency", + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "advanced": True, + }, + "bulk_delete_concurrency": { + "display_name": "Bulk Delete Concurrency", + "info": "Optional concurrency level for bulk delete operations.", + "advanced": True, 
+ }, + "setup_mode": { + "display_name": "Setup Mode", + "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", + "options": ["Sync", "Async", "Off"], + "advanced": True, + }, + "pre_delete_collection": { + "display_name": "Pre Delete Collection", + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "advanced": True, + }, + "metadata_indexing_include": { + "display_name": "Metadata Indexing Include", + "info": "Optional list of metadata fields to include in the indexing.", + "advanced": True, + }, + "metadata_indexing_exclude": { + "display_name": "Metadata Indexing Exclude", + "info": "Optional list of metadata fields to exclude from the indexing.", + "advanced": True, + }, + "collection_indexing_policy": { + "display_name": "Collection Indexing Policy", + "info": "Optional dictionary defining the indexing policy for the collection.", + "advanced": True, + }, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( + self, + embedding: Embeddings, + collection_name: str, + input_value: Text, + token: str, + api_endpoint: str, + search_type: str = "Similarity", + number_of_results: int = 4, + namespace: Optional[str] = None, + metric: Optional[str] = None, + batch_size: Optional[int] = None, + bulk_insert_batch_concurrency: Optional[int] = None, + bulk_insert_overwrite_concurrency: Optional[int] = None, + bulk_delete_concurrency: Optional[int] = None, + setup_mode: str = "Sync", + pre_delete_collection: bool = False, + metadata_indexing_include: Optional[List[str]] = None, + metadata_indexing_exclude: Optional[List[str]] = None, + collection_indexing_policy: Optional[dict] = None, + ) -> List[Record]: + vector_store = AstraDBVectorStoreComponent().build( + embedding=embedding, + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + 
namespace=namespace, + metric=metric, + batch_size=batch_size, + bulk_insert_batch_concurrency=bulk_insert_batch_concurrency, + bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency, + bulk_delete_concurrency=bulk_delete_concurrency, + setup_mode=setup_mode, + pre_delete_collection=pre_delete_collection, + metadata_indexing_include=metadata_indexing_include, + metadata_indexing_exclude=metadata_indexing_exclude, + collection_indexing_policy=collection_indexing_policy, + ) + try: + return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results) + except KeyError as e: + if "content" in str(e): + raise ValueError( + "You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'." + ) + else: + raise e diff --git a/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py b/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py new file mode 100644 index 000000000..3e4b7880d --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/ChromaSearch.py @@ -0,0 +1,111 @@ +from typing import List, Optional + +import chromadb # type: ignore +from langchain_community.vectorstores.chroma import Chroma + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.field_typing import Embeddings, Text +from langflow.schema import Record + + +class ChromaSearchComponent(LCVectorStoreComponent): + display_name: str = "Chroma Search" + description: str = "Search a Chroma collection for similar documents." + icon = "Chroma" + + def build_config(self): + """ + Builds the configuration for the component. + + Returns: + - dict: A dictionary containing the configuration options for the component. 
+ """ + return { + "input_value": {"display_name": "Input"}, + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "collection_name": {"display_name": "Collection Name", "value": "langflow"}, + # "persist": {"display_name": "Persist"}, + "index_directory": {"display_name": "Index Directory"}, + "code": {"show": False, "display_name": "Code"}, + "embedding": { + "display_name": "Embedding", + "info": "Embedding model to vectorize inputs (make sure to use same as index)", + }, + "chroma_server_cors_allow_origins": { + "display_name": "Server CORS Allow Origins", + "advanced": True, + }, + "chroma_server_host": {"display_name": "Server Host", "advanced": True}, + "chroma_server_port": {"display_name": "Server Port", "advanced": True}, + "chroma_server_grpc_port": { + "display_name": "Server gRPC Port", + "advanced": True, + }, + "chroma_server_ssl_enabled": { + "display_name": "Server SSL Enabled", + "advanced": True, + }, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + search_type: str, + collection_name: str, + embedding: Embeddings, + chroma_server_ssl_enabled: bool, + number_of_results: int = 4, + index_directory: Optional[str] = None, + chroma_server_cors_allow_origins: Optional[str] = None, + chroma_server_host: Optional[str] = None, + chroma_server_port: Optional[int] = None, + chroma_server_grpc_port: Optional[int] = None, + ) -> List[Record]: + """ + Builds the Vector Store or BaseRetriever object. + + Args: + - collection_name (str): The name of the collection. + - persist_directory (Optional[str]): The directory to persist the Vector Store to. + - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server. + - persist (bool): Whether to persist the Vector Store or not. + - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store. 
+ - documents (Optional[Document]): The documents to use for the Vector Store. + - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server. + - chroma_server_host (Optional[str]): The host for the Chroma server. + - chroma_server_port (Optional[int]): The port for the Chroma server. + - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server. + + Returns: + - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object. + """ + + # Chroma settings + chroma_settings = None + + if chroma_server_host is not None: + chroma_settings = chromadb.config.Settings( + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None, + chroma_server_host=chroma_server_host, + chroma_server_port=chroma_server_port or None, + chroma_server_grpc_port=chroma_server_grpc_port or None, + chroma_server_ssl_enabled=chroma_server_ssl_enabled, + ) + if index_directory: + index_directory = self.resolve_path(index_directory) + vector_store = Chroma( + embedding_function=embedding, + collection_name=collection_name, + persist_directory=index_directory, + client_settings=chroma_settings, + ) + + return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results) diff --git a/src/backend/base/langflow/components/vectorsearch/FAISSSearch.py b/src/backend/base/langflow/components/vectorsearch/FAISSSearch.py new file mode 100644 index 000000000..d68f455cc --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/FAISSSearch.py @@ -0,0 +1,48 @@ +from typing import List + +from langchain_community.vectorstores.faiss import FAISS + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.field_typing import Embeddings, Text +from langflow.schema import Record + + +class FAISSSearchComponent(LCVectorStoreComponent): + display_name = "FAISS Search" + description = "Search a FAISS Vector Store for similar documents." 
+ documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" + + def build_config(self): + return { + "embedding": {"display_name": "Embedding"}, + "folder_path": { + "display_name": "Folder Path", + "info": "Path to save the FAISS index. It will be relative to where Langflow is running.", + }, + "input_value": {"display_name": "Input"}, + "index_name": {"display_name": "Index Name"}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + embedding: Embeddings, + folder_path: str, + number_of_results: int = 4, + index_name: str = "langflow_index", + ) -> List[Record]: + if not folder_path: + raise ValueError("Folder path is required to save the FAISS index.") + path = self.resolve_path(folder_path) + vector_store = FAISS.load_local(folder_path=Text(path), embeddings=embedding, index_name=index_name) + if not vector_store: + raise ValueError("Failed to load the FAISS index.") + + return self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type="similarity", k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/MongoDBAtlasVectorSearch.py b/src/backend/base/langflow/components/vectorsearch/MongoDBAtlasVectorSearch.py new file mode 100644 index 000000000..0ecde1688 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/MongoDBAtlasVectorSearch.py @@ -0,0 +1,57 @@ +from typing import List, Optional + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.MongoDBAtlasVector import MongoDBAtlasComponent +from langflow.field_typing import Embeddings, NestedDict, Text +from langflow.schema import Record + + +class MongoDBAtlasSearchComponent(LCVectorStoreComponent): + display_name = "MongoDB Atlas Search" + description = "Search a MongoDB Atlas Vector 
Store for similar documents." + + def build_config(self): + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "embedding": {"display_name": "Embedding"}, + "collection_name": {"display_name": "Collection Name"}, + "db_name": {"display_name": "Database Name"}, + "index_name": {"display_name": "Index Name"}, + "mongodb_atlas_cluster_uri": {"display_name": "MongoDB Atlas Cluster URI"}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + search_type: str, + embedding: Embeddings, + number_of_results: int = 4, + collection_name: str = "", + db_name: str = "", + index_name: str = "", + mongodb_atlas_cluster_uri: str = "", + search_kwargs: Optional[NestedDict] = None, + ) -> List[Record]: + search_kwargs = search_kwargs or {} + vector_store = MongoDBAtlasComponent().build( + mongodb_atlas_cluster_uri=mongodb_atlas_cluster_uri, + collection_name=collection_name, + db_name=db_name, + embedding=embedding, + index_name=index_name, + ) + if not vector_store: + raise ValueError("Failed to create MongoDB Atlas Vector Store") + return self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type=search_type, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py b/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py new file mode 100644 index 000000000..96ba35743 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/PineconeSearch.py @@ -0,0 +1,76 @@ +from typing import List, Optional + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.Pinecone import PineconeComponent +from 
langflow.field_typing import Embeddings, Text +from langflow.schema import Record + + +class PineconeSearchComponent(PineconeComponent, LCVectorStoreComponent): + display_name = "Pinecone Search" + description = "Search a Pinecone Vector Store for similar documents." + icon = "Pinecone" + + def build_config(self): + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "embedding": {"display_name": "Embedding"}, + "index_name": {"display_name": "Index Name"}, + "namespace": {"display_name": "Namespace"}, + "pinecone_api_key": { + "display_name": "Pinecone API Key", + "default": "", + "password": True, + "required": True, + }, + "pinecone_env": { + "display_name": "Pinecone Environment", + "default": "", + "required": True, + }, + "search_kwargs": {"display_name": "Search Kwargs", "default": "{}"}, + "pool_threads": { + "display_name": "Pool Threads", + "default": 1, + "advanced": True, + }, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + embedding: Embeddings, + pinecone_env: str, + text_key: str = "text", + number_of_results: int = 4, + pool_threads: int = 4, + index_name: Optional[str] = None, + pinecone_api_key: Optional[str] = None, + namespace: Optional[str] = "default", + search_type: str = "similarity", + ) -> List[Record]: # type: ignore[override] + vector_store = super().build( + embedding=embedding, + pinecone_env=pinecone_env, + inputs=[], + text_key=text_key, + pool_threads=pool_threads, + index_name=index_name, + pinecone_api_key=pinecone_api_key, + namespace=namespace, + ) + if not vector_store: + raise ValueError("Failed to load the Pinecone index.") + + return self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type=search_type, k=number_of_results + ) diff --git 
a/src/backend/base/langflow/components/vectorsearch/QdrantSearch.py b/src/backend/base/langflow/components/vectorsearch/QdrantSearch.py new file mode 100644 index 000000000..8fcc5d761 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/QdrantSearch.py @@ -0,0 +1,98 @@ +from typing import List, Optional + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.Qdrant import QdrantComponent +from langflow.field_typing import Embeddings, NestedDict, Text +from langflow.schema import Record + + +class QdrantSearchComponent(QdrantComponent, LCVectorStoreComponent): + display_name = "Qdrant Search" + description = "Construct Qdrant wrapper from a list of texts." + icon = "Qdrant" + + def build_config(self): + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "embedding": {"display_name": "Embedding"}, + "api_key": {"display_name": "API Key", "password": True, "advanced": True}, + "collection_name": {"display_name": "Collection Name"}, + "content_payload_key": { + "display_name": "Content Payload Key", + "advanced": True, + }, + "distance_func": {"display_name": "Distance Function", "advanced": True}, + "grpc_port": {"display_name": "gRPC Port", "advanced": True}, + "host": {"display_name": "Host", "advanced": True}, + "https": {"display_name": "HTTPS", "advanced": True}, + "location": {"display_name": "Location", "advanced": True}, + "metadata_payload_key": { + "display_name": "Metadata Payload Key", + "advanced": True, + }, + "path": {"display_name": "Path", "advanced": True}, + "port": {"display_name": "Port", "advanced": True}, + "prefer_grpc": {"display_name": "Prefer gRPC", "advanced": True}, + "prefix": {"display_name": "Prefix", "advanced": True}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + "timeout": {"display_name": "Timeout", "advanced": True}, + "url": 
{"display_name": "URL", "advanced": True}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + embedding: Embeddings, + collection_name: str, + number_of_results: int = 4, + search_type: str = "similarity", + api_key: Optional[str] = None, + content_payload_key: str = "page_content", + distance_func: str = "Cosine", + grpc_port: int = 6334, + https: bool = False, + host: Optional[str] = None, + location: Optional[str] = None, + metadata_payload_key: str = "metadata", + path: Optional[str] = None, + port: Optional[int] = 6333, + prefer_grpc: bool = False, + prefix: Optional[str] = None, + search_kwargs: Optional[NestedDict] = None, + timeout: Optional[int] = None, + url: Optional[str] = None, + ) -> List[Record]: # type: ignore[override] + vector_store = super().build( + embedding=embedding, + collection_name=collection_name, + api_key=api_key, + content_payload_key=content_payload_key, + distance_func=distance_func, + grpc_port=grpc_port, + https=https, + host=host, + location=location, + metadata_payload_key=metadata_payload_key, + path=path, + port=port, + prefer_grpc=prefer_grpc, + prefix=prefix, + search_kwargs=search_kwargs, + timeout=timeout, + url=url, + ) + if not vector_store: + raise ValueError("Failed to load the Qdrant index.") + + return self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type=search_type, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/RedisSearch.py b/src/backend/base/langflow/components/vectorsearch/RedisSearch.py new file mode 100644 index 000000000..25e71c64b --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/RedisSearch.py @@ -0,0 +1,82 @@ +from typing import List, Optional + +from langchain.embeddings.base import Embeddings + +from langflow.components.vectorstores.base.model import 
LCVectorStoreComponent +from langflow.components.vectorstores.Redis import RedisComponent +from langflow.field_typing import Text +from langflow.schema import Record + + +class RedisSearchComponent(RedisComponent, LCVectorStoreComponent): + """ + A custom component for implementing a Vector Store using Redis. + """ + + display_name: str = "Redis Search" + description: str = "Search a Redis Vector Store for similar documents." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/redis" + + def build_config(self): + """ + Builds the configuration for the component. + + Returns: + - dict: A dictionary containing the configuration options for the component. + """ + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "index_name": {"display_name": "Index Name", "value": "your_index"}, + "code": {"show": False, "display_name": "Code"}, + "embedding": {"display_name": "Embedding"}, + "schema": {"display_name": "Schema", "file_types": [".yaml"]}, + "redis_server_url": { + "display_name": "Redis Server Connection String", + "advanced": False, + }, + "redis_index_name": {"display_name": "Redis Index", "advanced": False}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + search_type: str, + embedding: Embeddings, + redis_server_url: str, + redis_index_name: str, + number_of_results: int = 4, + schema: Optional[str] = None, + ) -> List[Record]: + """ + Builds the Vector Store or BaseRetriever object. + + Args: + - embedding (Embeddings): The embeddings to use for the Vector Store. + - documents (Optional[Document]): The documents to use for the Vector Store. + - redis_index_name (str): The name of the Redis index. + - redis_server_url (str): The URL for the Redis server. 
+ + Returns: + - VectorStore: The Vector Store object. + """ + vector_store = super().build( + embedding=embedding, + redis_server_url=redis_server_url, + redis_index_name=redis_index_name, + schema=schema, + ) + if not vector_store: + raise ValueError("Failed to load the Redis index.") + + return self.search_with_vector_store( + input_value=input_value, search_type=search_type, vector_store=vector_store, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/SupabaseVectorStoreSearch.py b/src/backend/base/langflow/components/vectorsearch/SupabaseVectorStoreSearch.py new file mode 100644 index 000000000..aef1c13b7 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/SupabaseVectorStoreSearch.py @@ -0,0 +1,54 @@ +from typing import List + +from langchain_community.vectorstores.supabase import SupabaseVectorStore +from supabase.client import Client, create_client + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.field_typing import Embeddings, Text +from langflow.schema import Record + + +class SupabaseSearchComponent(LCVectorStoreComponent): + display_name = "Supabase Search" + description = "Search a Supabase Vector Store for similar documents." 
+ icon = "Supabase" + + def build_config(self): + return { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "embedding": {"display_name": "Embedding"}, + "query_name": {"display_name": "Query Name"}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + "supabase_service_key": {"display_name": "Supabase Service Key"}, + "supabase_url": {"display_name": "Supabase URL"}, + "table_name": {"display_name": "Table Name", "advanced": True}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( + self, + input_value: Text, + search_type: str, + embedding: Embeddings, + number_of_results: int = 4, + query_name: str = "", + supabase_service_key: str = "", + supabase_url: str = "", + table_name: str = "", + ) -> List[Record]: + supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key) + vector_store = SupabaseVectorStore( + client=supabase, + embedding=embedding, + table_name=table_name, + query_name=query_name, + ) + return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results) diff --git a/src/backend/base/langflow/components/vectorsearch/VectaraSearch.py b/src/backend/base/langflow/components/vectorsearch/VectaraSearch.py new file mode 100644 index 000000000..459054f67 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/VectaraSearch.py @@ -0,0 +1,66 @@ +from typing import List + +from langchain_community.vectorstores.vectara import Vectara + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.Vectara import VectaraComponent +from langflow.field_typing import Text +from langflow.schema import Record + + +class VectaraSearchComponent(VectaraComponent, LCVectorStoreComponent): + display_name: str = "Vectara Search" + description: 
str = "Search a Vectara Vector Store for similar documents." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/vectara" + icon = "Vectara" + + field_config = { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "vectara_customer_id": { + "display_name": "Vectara Customer ID", + }, + "vectara_corpus_id": { + "display_name": "Vectara Corpus ID", + }, + "vectara_api_key": { + "display_name": "Vectara API Key", + "password": True, + }, + "files_url": { + "display_name": "Files Url", + "info": "Make vectara object using url of files (optional)", + }, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + search_type: str, + vectara_customer_id: str, + vectara_corpus_id: str, + vectara_api_key: str, + number_of_results: int = 4, + ) -> List[Record]: + source = "Langflow" + vector_store = Vectara( + vectara_customer_id=vectara_customer_id, + vectara_corpus_id=vectara_corpus_id, + vectara_api_key=vectara_api_key, + source=source, + ) + + if not vector_store: + raise ValueError("Failed to create Vectara Vector Store") + + return self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type=search_type, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py b/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py new file mode 100644 index 000000000..f47538f2e --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/WeaviateSearch.py @@ -0,0 +1,86 @@ +from typing import List, Optional + +from langchain.embeddings.base import Embeddings + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.Weaviate import WeaviateVectorStoreComponent 
+from langflow.field_typing import Text +from langflow.schema import Record + + +class WeaviateSearchVectorStore(WeaviateVectorStoreComponent, LCVectorStoreComponent): + display_name: str = "Weaviate Search" + description: str = "Search a Weaviate Vector Store for similar documents." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/weaviate" + icon = "Weaviate" + + field_config = { + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "input_value": {"display_name": "Input"}, + "url": {"display_name": "Weaviate URL", "value": "http://localhost:8080"}, + "api_key": { + "display_name": "API Key", + "password": True, + "required": False, + }, + "index_name": { + "display_name": "Index name", + "required": False, + }, + "text_key": { + "display_name": "Text Key", + "required": False, + "advanced": True, + "value": "text", + }, + "embedding": {"display_name": "Embedding"}, + "attributes": { + "display_name": "Attributes", + "required": False, + "is_list": True, + "field_type": "str", + "advanced": True, + }, + "search_by_text": { + "display_name": "Search By Text", + "field_type": "bool", + "advanced": True, + }, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + search_type: str, + url: str, + number_of_results: int = 4, + search_by_text: bool = False, + api_key: Optional[str] = None, + index_name: Optional[str] = None, + text_key: str = "text", + embedding: Optional[Embeddings] = None, + attributes: Optional[list] = None, + ) -> List[Record]: + vector_store = super().build( + url=url, + api_key=api_key, + index_name=index_name, + text_key=text_key, + embedding=embedding, + attributes=attributes, + search_by_text=search_by_text, + ) + if not vector_store: + raise ValueError("Failed to load the Weaviate index.") + + return 
self.search_with_vector_store( + vector_store=vector_store, input_value=input_value, search_type=search_type, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorsearch/__init__.py b/src/backend/base/langflow/components/vectorsearch/__init__.py new file mode 100644 index 000000000..28ea85fce --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/__init__.py @@ -0,0 +1,25 @@ +from .AstraDBSearch import AstraDBSearchComponent +from .ChromaSearch import ChromaSearchComponent +from .FAISSSearch import FAISSSearchComponent +from .MongoDBAtlasVectorSearch import MongoDBAtlasSearchComponent +from .PineconeSearch import PineconeSearchComponent +from .QdrantSearch import QdrantSearchComponent +from .RedisSearch import RedisSearchComponent +from .SupabaseVectorStoreSearch import SupabaseSearchComponent +from .VectaraSearch import VectaraSearchComponent +from .WeaviateSearch import WeaviateSearchVectorStore +from .pgvectorSearch import PGVectorSearchComponent + +__all__ = [ + "AstraDBSearchComponent", + "ChromaSearchComponent", + "FAISSSearchComponent", + "MongoDBAtlasSearchComponent", + "PineconeSearchComponent", + "QdrantSearchComponent", + "RedisSearchComponent", + "SupabaseSearchComponent", + "VectaraSearchComponent", + "WeaviateSearchVectorStore", + "PGVectorSearchComponent", +] diff --git a/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py b/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py new file mode 100644 index 000000000..9b074b5f6 --- /dev/null +++ b/src/backend/base/langflow/components/vectorsearch/pgvectorSearch.py @@ -0,0 +1,74 @@ +from typing import List + +from langchain.embeddings.base import Embeddings + +from langflow.components.vectorstores.base.model import LCVectorStoreComponent +from langflow.components.vectorstores.pgvector import PGVectorComponent +from langflow.field_typing import Text +from langflow.schema import Record + + +class 
PGVectorSearchComponent(PGVectorComponent, LCVectorStoreComponent): + display_name: str = "PGVector Search" + description: str = "Search a PGVector Store for similar documents." + documentation = "https://python.langchain.com/docs/integrations/vectorstores/pgvector" + + def build_config(self): + """ + Builds the configuration for the component. + + Returns: + - dict: A dictionary containing the configuration options for the component. + """ + return { + "code": {"show": False}, + "embedding": {"display_name": "Embedding"}, + "search_type": { + "display_name": "Search Type", + "options": ["Similarity", "MMR"], + }, + "pg_server_url": { + "display_name": "PostgreSQL Server Connection String", + "advanced": False, + }, + "collection_name": {"display_name": "Table", "advanced": False}, + "input_value": {"display_name": "Input"}, + "number_of_results": { + "display_name": "Number of Results", + "info": "Number of results to return.", + "advanced": True, + }, + } + + def build( # type: ignore[override] + self, + input_value: Text, + embedding: Embeddings, + search_type: str, + pg_server_url: str, + collection_name: str, + number_of_results: int = 4, + ) -> List[Record]: + """ + Builds the Vector Store or BaseRetriever object. + + Args: + - input_value (str): The input value to search for. + - embedding (Embeddings): The embeddings to use for the Vector Store. + - collection_name (str): The name of the PG table. + - pg_server_url (str): The URL for the PG server. + + Returns: + - VectorStore: The Vector Store object. 
+ """ + try: + vector_store = super().build( + embedding=embedding, + pg_server_url=pg_server_url, + collection_name=collection_name, + ) + except Exception as e: + raise RuntimeError(f"Failed to build PGVector: {e}") + return self.search_with_vector_store( + input_value=input_value, search_type=search_type, vector_store=vector_store, k=number_of_results + ) diff --git a/src/backend/base/langflow/components/vectorstores/AstraDB.py b/src/backend/base/langflow/components/vectorstores/AstraDB.py new file mode 100644 index 000000000..460d1f6db --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/AstraDB.py @@ -0,0 +1,158 @@ +from typing import List, Optional + +from langchain_astradb import AstraDBVectorStore +from langchain_astradb.utils.astradb import SetupMode + +from langflow.custom import CustomComponent +from langflow.field_typing import Embeddings, VectorStore +from langflow.schema import Record + + +class AstraDBVectorStoreComponent(CustomComponent): + display_name = "Astra DB" + description = "Builds or loads an Astra DB Vector Store." 
+ icon = "AstraDB" + field_order = ["token", "api_endpoint", "collection_name", "inputs", "embedding"] + + def build_config(self): + return { + "inputs": { + "display_name": "Inputs", + "info": "Optional list of records to be processed and stored in the vector store.", + }, + "embedding": {"display_name": "Embedding", "info": "Embedding to use"}, + "collection_name": { + "display_name": "Collection Name", + "info": "The name of the collection within Astra DB where the vectors will be stored.", + }, + "token": { + "display_name": "Token", + "info": "Authentication token for accessing Astra DB.", + "password": True, + }, + "api_endpoint": { + "display_name": "API Endpoint", + "info": "API endpoint URL for the Astra DB service.", + }, + "namespace": { + "display_name": "Namespace", + "info": "Optional namespace within Astra DB to use for the collection.", + "advanced": True, + }, + "metric": { + "display_name": "Metric", + "info": "Optional distance metric for vector comparisons in the vector store.", + "advanced": True, + }, + "batch_size": { + "display_name": "Batch Size", + "info": "Optional number of records to process in a single batch.", + "advanced": True, + }, + "bulk_insert_batch_concurrency": { + "display_name": "Bulk Insert Batch Concurrency", + "info": "Optional concurrency level for bulk insert operations.", + "advanced": True, + }, + "bulk_insert_overwrite_concurrency": { + "display_name": "Bulk Insert Overwrite Concurrency", + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "advanced": True, + }, + "bulk_delete_concurrency": { + "display_name": "Bulk Delete Concurrency", + "info": "Optional concurrency level for bulk delete operations.", + "advanced": True, + }, + "setup_mode": { + "display_name": "Setup Mode", + "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", + "options": ["Sync", "Async", "Off"], + "advanced": True, + }, + 
"pre_delete_collection": { + "display_name": "Pre Delete Collection", + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "advanced": True, + }, + "metadata_indexing_include": { + "display_name": "Metadata Indexing Include", + "info": "Optional list of metadata fields to include in the indexing.", + "advanced": True, + }, + "metadata_indexing_exclude": { + "display_name": "Metadata Indexing Exclude", + "info": "Optional list of metadata fields to exclude from the indexing.", + "advanced": True, + }, + "collection_indexing_policy": { + "display_name": "Collection Indexing Policy", + "info": "Optional dictionary defining the indexing policy for the collection.", + "advanced": True, + }, + } + + def build( + self, + embedding: Embeddings, + token: str, + api_endpoint: str, + collection_name: str, + inputs: Optional[List[Record]] = None, + namespace: Optional[str] = None, + metric: Optional[str] = None, + batch_size: Optional[int] = None, + bulk_insert_batch_concurrency: Optional[int] = None, + bulk_insert_overwrite_concurrency: Optional[int] = None, + bulk_delete_concurrency: Optional[int] = None, + setup_mode: str = "Async", + pre_delete_collection: bool = False, + metadata_indexing_include: Optional[List[str]] = None, + metadata_indexing_exclude: Optional[List[str]] = None, + collection_indexing_policy: Optional[dict] = None, + ) -> VectorStore: + try: + setup_mode_value = SetupMode[setup_mode.upper()] + except KeyError: + raise ValueError(f"Invalid setup mode: {setup_mode}") + if inputs: + documents = [_input.to_lc_document() for _input in inputs] + + vector_store = AstraDBVectorStore.from_documents( + documents=documents, + embedding=embedding, + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + namespace=namespace, + metric=metric, + batch_size=batch_size, + bulk_insert_batch_concurrency=bulk_insert_batch_concurrency, + 
bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency, + bulk_delete_concurrency=bulk_delete_concurrency, + setup_mode=setup_mode_value, + pre_delete_collection=pre_delete_collection, + metadata_indexing_include=metadata_indexing_include, + metadata_indexing_exclude=metadata_indexing_exclude, + collection_indexing_policy=collection_indexing_policy, + ) + else: + vector_store = AstraDBVectorStore( + embedding=embedding, + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + namespace=namespace, + metric=metric, + batch_size=batch_size, + bulk_insert_batch_concurrency=bulk_insert_batch_concurrency, + bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency, + bulk_delete_concurrency=bulk_delete_concurrency, + setup_mode=setup_mode_value, + pre_delete_collection=pre_delete_collection, + metadata_indexing_include=metadata_indexing_include, + metadata_indexing_exclude=metadata_indexing_exclude, + collection_indexing_policy=collection_indexing_policy, + ) + + return vector_store diff --git a/src/backend/langflow/components/vectorstores/Chroma.py b/src/backend/base/langflow/components/vectorstores/Chroma.py similarity index 75% rename from src/backend/langflow/components/vectorstores/Chroma.py rename to src/backend/base/langflow/components/vectorstores/Chroma.py index 6eb8fedd6..8fe2f54a9 100644 --- a/src/backend/langflow/components/vectorstores/Chroma.py +++ b/src/backend/base/langflow/components/vectorstores/Chroma.py @@ -2,10 +2,12 @@ from typing import List, Optional, Union import chromadb # type: ignore from langchain.embeddings.base import Embeddings -from langchain.schema import BaseRetriever, Document +from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.chroma import Chroma -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record 
class ChromaComponent(CustomComponent): @@ -16,7 +18,7 @@ class ChromaComponent(CustomComponent): display_name: str = "Chroma" description: str = "Implementation of Vector Store using Chroma" documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma" - beta: bool = True + icon = "Chroma" def build_config(self): """ @@ -27,10 +29,9 @@ class ChromaComponent(CustomComponent): """ return { "collection_name": {"display_name": "Collection Name", "value": "langflow"}, - "persist": {"display_name": "Persist"}, - "persist_directory": {"display_name": "Persist Directory"}, + "index_directory": {"display_name": "Persist Directory"}, "code": {"advanced": True, "display_name": "Code"}, - "documents": {"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "chroma_server_cors_allow_origins": { "display_name": "Server CORS Allow Origins", @@ -51,11 +52,10 @@ class ChromaComponent(CustomComponent): def build( self, collection_name: str, - persist: bool, embedding: Embeddings, chroma_server_ssl_enabled: bool, - persist_directory: Optional[str] = None, - documents: Optional[List[Document]] = None, + index_directory: Optional[str] = None, + inputs: Optional[List[Record]] = None, chroma_server_cors_allow_origins: Optional[str] = None, chroma_server_host: Optional[str] = None, chroma_server_port: Optional[int] = None, @@ -66,9 +66,8 @@ class ChromaComponent(CustomComponent): Args: - collection_name (str): The name of the collection. - - persist_directory (Optional[str]): The directory to persist the Vector Store to. + - index_directory (Optional[str]): The directory to persist the Vector Store to. - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server. - - persist (bool): Whether to persist the Vector Store or not. - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store. 
- documents (Optional[Document]): The documents to use for the Vector Store. - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server. @@ -93,15 +92,31 @@ class ChromaComponent(CustomComponent): ) # If documents, then we need to create a Chroma instance using .from_documents + + # Check index_directory and expand it if it is a relative path + if index_directory is not None: + index_directory = self.resolve_path(index_directory) + + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents is not None and embedding is not None: if len(documents) == 0: raise ValueError("If documents are provided, there must be at least one document.") - return Chroma.from_documents( + chroma = Chroma.from_documents( documents=documents, # type: ignore - persist_directory=persist_directory if persist else None, + persist_directory=index_directory, collection_name=collection_name, embedding=embedding, client_settings=chroma_settings, ) - - return Chroma(persist_directory=persist_directory, client_settings=chroma_settings) + else: + chroma = Chroma( + persist_directory=index_directory, + client_settings=chroma_settings, + embedding_function=embedding, + ) + return chroma diff --git a/src/backend/base/langflow/components/vectorstores/FAISS.py b/src/backend/base/langflow/components/vectorstores/FAISS.py new file mode 100644 index 000000000..ea9ee1c4d --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/FAISS.py @@ -0,0 +1,46 @@ +from typing import List, Text, Union + +from langchain.schema import BaseRetriever +from langchain_community.vectorstores import VectorStore +from langchain_community.vectorstores.faiss import FAISS + +from langflow.field_typing import Embeddings +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record + + +class 
FAISSComponent(CustomComponent): + display_name = "FAISS" + description = "Ingest documents into FAISS Vector Store." + documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" + + def build_config(self): + return { + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, + "embedding": {"display_name": "Embedding"}, + "folder_path": { + "display_name": "Folder Path", + "info": "Path to save the FAISS index. It will be relative to where Langflow is running.", + }, + "index_name": {"display_name": "Index Name"}, + } + + def build( + self, + embedding: Embeddings, + inputs: List[Record], + folder_path: str, + index_name: str = "langflow_index", + ) -> Union[VectorStore, FAISS, BaseRetriever]: + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + vector_store = FAISS.from_documents(documents=documents, embedding=embedding) + if not folder_path: + raise ValueError("Folder path is required to save the FAISS index.") + path = self.resolve_path(folder_path) + vector_store.save_local(Text(path), index_name) + return vector_store diff --git a/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py new file mode 100644 index 000000000..5a12488e4 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/MongoDBAtlasVector.py @@ -0,0 +1,67 @@ +from typing import List, Optional + +from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch +from langflow.field_typing import Embeddings, NestedDict +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record + + +class MongoDBAtlasComponent(CustomComponent): + display_name = "MongoDB Atlas" + description = "Construct a `MongoDB Atlas Vector Search` vector store from raw 
documents." + icon = "MongoDB" + + def build_config(self): + return { + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, + "embedding": {"display_name": "Embedding"}, + "collection_name": {"display_name": "Collection Name"}, + "db_name": {"display_name": "Database Name"}, + "index_name": {"display_name": "Index Name"}, + "mongodb_atlas_cluster_uri": {"display_name": "MongoDB Atlas Cluster URI"}, + "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, + } + + def build( + self, + embedding: Embeddings, + inputs: Optional[List[Record]] = None, + collection_name: str = "", + db_name: str = "", + index_name: str = "", + mongodb_atlas_cluster_uri: str = "", + search_kwargs: Optional[NestedDict] = None, + ) -> MongoDBAtlasVectorSearch: + search_kwargs = search_kwargs or {} + try: + from pymongo import MongoClient + except ImportError: + raise ImportError("Please install pymongo to use MongoDB Atlas Vector Store") + try: + mongo_client: MongoClient = MongoClient(mongodb_atlas_cluster_uri) + collection = mongo_client[db_name][collection_name] + except Exception as e: + raise ValueError(f"Failed to connect to MongoDB Atlas: {e}") + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + if documents: + vector_store = MongoDBAtlasVectorSearch.from_documents( + documents=documents, + embedding=embedding, + collection=collection, + db_name=db_name, + index_name=index_name, + mongodb_atlas_cluster_uri=mongodb_atlas_cluster_uri, + search_kwargs=search_kwargs, + ) + else: + vector_store = MongoDBAtlasVectorSearch( + embedding=embedding, + collection=collection, + index_name=index_name, + ) + return vector_store diff --git a/src/backend/langflow/components/vectorstores/Pinecone.py b/src/backend/base/langflow/components/vectorstores/Pinecone.py similarity index 65% rename from src/backend/langflow/components/vectorstores/Pinecone.py 
rename to src/backend/base/langflow/components/vectorstores/Pinecone.py index 147af1df8..e4d6de3aa 100644 --- a/src/backend/langflow/components/vectorstores/Pinecone.py +++ b/src/backend/base/langflow/components/vectorstores/Pinecone.py @@ -5,31 +5,47 @@ import pinecone # type: ignore from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.pinecone import Pinecone -from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings + +from langflow.field_typing import Embeddings +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record class PineconeComponent(CustomComponent): display_name = "Pinecone" description = "Construct Pinecone wrapper from raw documents." + icon = "Pinecone" def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "index_name": {"display_name": "Index Name"}, "namespace": {"display_name": "Namespace"}, - "pinecone_api_key": {"display_name": "Pinecone API Key", "default": "", "password": True, "required": True}, - "pinecone_env": {"display_name": "Pinecone Environment", "default": "", "required": True}, + "pinecone_api_key": { + "display_name": "Pinecone API Key", + "default": "", + "password": True, + "required": True, + }, + "pinecone_env": { + "display_name": "Pinecone Environment", + "default": "", + "required": True, + }, "search_kwargs": {"display_name": "Search Kwargs", "default": "{}"}, - "pool_threads": {"display_name": "Pool Threads", "default": 1, "advanced": True}, + "pool_threads": { + "display_name": "Pool Threads", + "default": 1, + "advanced": True, + }, } def build( self, embedding: Embeddings, pinecone_env: str, - documents: List[Document], + inputs: Optional[List[Record]] = None, text_key: str = "text", pool_threads: 
int = 4, index_name: Optional[str] = None, @@ -44,6 +60,12 @@ class PineconeComponent(CustomComponent): pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) # type: ignore if not index_name: raise ValueError("Index Name is required.") + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents: return Pinecone.from_documents( documents=documents, diff --git a/src/backend/langflow/components/vectorstores/Qdrant.py b/src/backend/base/langflow/components/vectorstores/Qdrant.py similarity index 82% rename from src/backend/langflow/components/vectorstores/Qdrant.py rename to src/backend/base/langflow/components/vectorstores/Qdrant.py index 6d68cdd55..61f8e14af 100644 --- a/src/backend/langflow/components/vectorstores/Qdrant.py +++ b/src/backend/base/langflow/components/vectorstores/Qdrant.py @@ -3,27 +3,36 @@ from typing import Optional, Union from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.qdrant import Qdrant -from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings, NestedDict + +from langflow.field_typing import Embeddings, NestedDict +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record class QdrantComponent(CustomComponent): display_name = "Qdrant" description = "Construct Qdrant wrapper from a list of texts." 
+ icon = "Qdrant" def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "api_key": {"display_name": "API Key", "password": True, "advanced": True}, "collection_name": {"display_name": "Collection Name"}, - "content_payload_key": {"display_name": "Content Payload Key", "advanced": True}, + "content_payload_key": { + "display_name": "Content Payload Key", + "advanced": True, + }, "distance_func": {"display_name": "Distance Function", "advanced": True}, "grpc_port": {"display_name": "gRPC Port", "advanced": True}, "host": {"display_name": "Host", "advanced": True}, "https": {"display_name": "HTTPS", "advanced": True}, "location": {"display_name": "Location", "advanced": True}, - "metadata_payload_key": {"display_name": "Metadata Payload Key", "advanced": True}, + "metadata_payload_key": { + "display_name": "Metadata Payload Key", + "advanced": True, + }, "path": {"display_name": "Path", "advanced": True}, "port": {"display_name": "Port", "advanced": True}, "prefer_grpc": {"display_name": "Prefer gRPC", "advanced": True}, @@ -37,7 +46,7 @@ class QdrantComponent(CustomComponent): self, embedding: Embeddings, collection_name: str, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, api_key: Optional[str] = None, content_payload_key: str = "page_content", distance_func: str = "Cosine", @@ -54,6 +63,12 @@ class QdrantComponent(CustomComponent): timeout: Optional[int] = None, url: Optional[str] = None, ) -> Union[VectorStore, Qdrant, BaseRetriever]: + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) if documents is None: from qdrant_client import QdrantClient diff --git a/src/backend/langflow/components/vectorstores/Redis.py b/src/backend/base/langflow/components/vectorstores/Redis.py similarity index 
84% rename from src/backend/langflow/components/vectorstores/Redis.py rename to src/backend/base/langflow/components/vectorstores/Redis.py index b2d7e4542..ea1046037 100644 --- a/src/backend/langflow/components/vectorstores/Redis.py +++ b/src/backend/base/langflow/components/vectorstores/Redis.py @@ -3,9 +3,10 @@ from typing import Optional, Union from langchain.embeddings.base import Embeddings from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.redis import Redis -from langchain_core.documents import Document from langchain_core.retrievers import BaseRetriever -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record class RedisComponent(CustomComponent): @@ -16,7 +17,6 @@ class RedisComponent(CustomComponent): display_name: str = "Redis" description: str = "Implementation of Vector Store using Redis" documentation = "https://python.langchain.com/docs/integrations/vectorstores/redis" - beta = True def build_config(self): """ @@ -28,7 +28,7 @@ class RedisComponent(CustomComponent): return { "index_name": {"display_name": "Index Name", "value": "your_index"}, "code": {"show": False, "display_name": "Code"}, - "documents": {"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "schema": {"display_name": "Schema", "file_types": [".yaml"]}, "redis_server_url": { @@ -44,7 +44,7 @@ class RedisComponent(CustomComponent): redis_server_url: str, redis_index_name: str, schema: Optional[str] = None, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, ) -> Union[VectorStore, BaseRetriever]: """ Builds the Vector Store or BaseRetriever object. @@ -58,7 +58,13 @@ class RedisComponent(CustomComponent): Returns: - VectorStore: The Vector Store object. 
""" - if documents is None: + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + if not documents: if schema is None: raise ValueError("If no documents are provided, a schema must be provided.") redis_vs = Redis.from_existing_index( diff --git a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py similarity index 72% rename from src/backend/langflow/components/vectorstores/SupabaseVectorStore.py rename to src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py index 2ec6dfabc..876742117 100644 --- a/src/backend/langflow/components/vectorstores/SupabaseVectorStore.py +++ b/src/backend/base/langflow/components/vectorstores/SupabaseVectorStore.py @@ -1,12 +1,14 @@ -from typing import List, Union +from typing import List, Optional, Union from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.supabase import SupabaseVectorStore -from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings, NestedDict from supabase.client import Client, create_client +from langflow.field_typing import Embeddings, NestedDict +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record + class SupabaseComponent(CustomComponent): display_name = "Supabase" @@ -14,7 +16,7 @@ class SupabaseComponent(CustomComponent): def build_config(self): return { - "documents": {"display_name": "Documents"}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "query_name": {"display_name": "Query Name"}, "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, @@ -26,7 +28,7 @@ class SupabaseComponent(CustomComponent): def build( self, embedding: 
Embeddings, - documents: List[Document], + inputs: Optional[List[Record]] = None, query_name: str = "", search_kwargs: NestedDict = {}, supabase_service_key: str = "", @@ -34,6 +36,12 @@ class SupabaseComponent(CustomComponent): table_name: str = "", ) -> Union[VectorStore, SupabaseVectorStore, BaseRetriever]: supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key) + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) return SupabaseVectorStore.from_documents( documents=documents, embedding=embedding, diff --git a/src/backend/langflow/components/vectorstores/Vectara.py b/src/backend/base/langflow/components/vectorstores/Vectara.py similarity index 78% rename from src/backend/langflow/components/vectorstores/Vectara.py rename to src/backend/base/langflow/components/vectorstores/Vectara.py index 31615fe7f..5c087875f 100644 --- a/src/backend/langflow/components/vectorstores/Vectara.py +++ b/src/backend/base/langflow/components/vectorstores/Vectara.py @@ -6,15 +6,17 @@ from typing import List, Optional, Union from langchain_community.embeddings import FakeEmbeddings from langchain_community.vectorstores.vectara import Vectara from langchain_core.vectorstores import VectorStore -from langflow import CustomComponent -from langflow.field_typing import BaseRetriever, Document + +from langflow.field_typing import BaseRetriever +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record class VectaraComponent(CustomComponent): display_name: str = "Vectara" description: str = "Implementation of Vector Store using Vectara" documentation = "https://python.langchain.com/docs/integrations/vectorstores/vectara" - beta = True + icon = "Vectara" field_config = { "vectara_customer_id": { "display_name": "Vectara Customer ID", @@ -26,7 +28,11 @@ class VectaraComponent(CustomComponent): "display_name": 
"Vectara API Key", "password": True, }, - "documents": {"display_name": "Documents", "info": "If provided, will be upserted to corpus (optional)"}, + "inputs": { + "display_name": "Input", + "input_types": ["Document", "Record"], + "info": "If provided, will be upserted to corpus (optional)", + }, "files_url": { "display_name": "Files Url", "info": "Make vectara object using url of files (optional)", @@ -39,11 +45,18 @@ class VectaraComponent(CustomComponent): vectara_corpus_id: str, vectara_api_key: str, files_url: Optional[List[str]] = None, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, ) -> Union[VectorStore, BaseRetriever]: source = "Langflow" - if documents is not None: + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) + + if documents: return Vectara.from_documents( documents=documents, # type: ignore embedding=FakeEmbeddings(size=768), diff --git a/src/backend/langflow/components/vectorstores/Weaviate.py b/src/backend/base/langflow/components/vectorstores/Weaviate.py similarity index 73% rename from src/backend/langflow/components/vectorstores/Weaviate.py rename to src/backend/base/langflow/components/vectorstores/Weaviate.py index 9b4967c36..3362a3fca 100644 --- a/src/backend/langflow/components/vectorstores/Weaviate.py +++ b/src/backend/base/langflow/components/vectorstores/Weaviate.py @@ -2,17 +2,17 @@ from typing import Optional, Union import weaviate # type: ignore from langchain.embeddings.base import Embeddings -from langchain.schema import BaseRetriever, Document +from langchain.schema import BaseRetriever from langchain_community.vectorstores import VectorStore, Weaviate -from langflow import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record -class WeaviateVectorStore(CustomComponent): +class 
WeaviateVectorStoreComponent(CustomComponent): display_name: str = "Weaviate" description: str = "Implementation of Vector Store using Weaviate" documentation = "https://python.langchain.com/docs/integrations/vectorstores/weaviate" - beta = True field_config = { "url": {"display_name": "Weaviate URL", "value": "http://localhost:8080"}, "api_key": { @@ -24,8 +24,13 @@ class WeaviateVectorStore(CustomComponent): "display_name": "Index name", "required": False, }, - "text_key": {"display_name": "Text Key", "required": False, "advanced": True, "value": "text"}, - "documents": {"display_name": "Documents", "is_list": True}, + "text_key": { + "display_name": "Text Key", + "required": False, + "advanced": True, + "value": "text", + }, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "attributes": { "display_name": "Attributes", @@ -34,7 +39,11 @@ class WeaviateVectorStore(CustomComponent): "field_type": "str", "advanced": True, }, - "search_by_text": {"display_name": "Search By Text", "field_type": "bool", "advanced": True}, + "search_by_text": { + "display_name": "Search By Text", + "field_type": "bool", + "advanced": True, + }, "code": {"show": False}, } @@ -46,7 +55,7 @@ class WeaviateVectorStore(CustomComponent): index_name: Optional[str] = None, text_key: str = "text", embedding: Optional[Embeddings] = None, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, attributes: Optional[list] = None, ) -> Union[VectorStore, BaseRetriever]: if api_key: @@ -69,8 +78,14 @@ class WeaviateVectorStore(CustomComponent): return pascal_case_word index_name = _to_pascal_case(index_name) if index_name else None + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) - if documents is not None and embedding is not None: + if documents and embedding is not None: return 
Weaviate.from_documents( client=client, index_name=index_name, diff --git a/src/backend/base/langflow/components/vectorstores/__init__.py b/src/backend/base/langflow/components/vectorstores/__init__.py new file mode 100644 index 000000000..48e1bf9c7 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/__init__.py @@ -0,0 +1,26 @@ +from .AstraDB import AstraDBVectorStoreComponent +from .Chroma import ChromaComponent +from .FAISS import FAISSComponent +from .MongoDBAtlasVector import MongoDBAtlasComponent +from .Pinecone import PineconeComponent +from .Qdrant import QdrantComponent +from .Redis import RedisComponent +from .SupabaseVectorStore import SupabaseComponent +from .Vectara import VectaraComponent +from .Weaviate import WeaviateVectorStoreComponent +from .pgvector import PGVectorComponent + +__all__ = [ + "AstraDBVectorStoreComponent", + "ChromaComponent", + "FAISSComponent", + "MongoDBAtlasComponent", + "PineconeComponent", + "QdrantComponent", + "RedisComponent", + "SupabaseComponent", + "VectaraComponent", + "WeaviateVectorStoreComponent", + "base", + "PGVectorComponent", +] diff --git a/src/backend/base/langflow/components/vectorstores/base/__init__.py b/src/backend/base/langflow/components/vectorstores/base/__init__.py new file mode 100644 index 000000000..93e42c4aa --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/base/__init__.py @@ -0,0 +1,3 @@ +from .model import LCVectorStoreComponent + +__all__ = ["LCVectorStoreComponent"] diff --git a/src/backend/base/langflow/components/vectorstores/base/model.py b/src/backend/base/langflow/components/vectorstores/base/model.py new file mode 100644 index 000000000..668c5eff2 --- /dev/null +++ b/src/backend/base/langflow/components/vectorstores/base/model.py @@ -0,0 +1,47 @@ +from typing import List, Union + +from langchain_core.documents import Document +from langchain_core.retrievers import BaseRetriever +from langchain_core.vectorstores import VectorStore + +from 
langflow.field_typing import Text +from langflow.helpers.record import docs_to_records +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema import Record + + +class LCVectorStoreComponent(CustomComponent): + display_name: str = "LC Vector Store" + description: str = "Search a LC Vector Store for similar documents." + + def search_with_vector_store( + self, + input_value: Text, + search_type: str, + vector_store: Union[VectorStore, BaseRetriever], + k=10, + **kwargs, + ) -> List[Record]: + """ + Search for records in the vector store based on the input value and search type. + + Args: + input_value (Text): The input value to search for. + search_type (str): The type of search to perform. + vector_store (VectorStore): The vector store to search in. + + Returns: + List[Record]: A list of records matching the search criteria. + + Raises: + ValueError: If invalid inputs are provided. + """ + + docs: List[Document] = [] + if input_value and isinstance(input_value, str) and hasattr(vector_store, "search"): + docs = vector_store.search(query=input_value, search_type=search_type.lower(), k=k, **kwargs) + else: + raise ValueError("Invalid inputs provided.") + records = docs_to_records(docs) + self.status = records + return records diff --git a/src/backend/langflow/components/vectorstores/pgvector.py b/src/backend/base/langflow/components/vectorstores/pgvector.py similarity index 84% rename from src/backend/langflow/components/vectorstores/pgvector.py rename to src/backend/base/langflow/components/vectorstores/pgvector.py index 2baf6dae6..b061b22ac 100644 --- a/src/backend/langflow/components/vectorstores/pgvector.py +++ b/src/backend/base/langflow/components/vectorstores/pgvector.py @@ -3,9 +3,10 @@ from typing import Optional, Union from langchain.embeddings.base import Embeddings from langchain_community.vectorstores import VectorStore from langchain_community.vectorstores.pgvector import PGVector -from langchain_core.documents 
import Document from langchain_core.retrievers import BaseRetriever -from langflow import CustomComponent + +from langflow.interface.custom.custom_component import CustomComponent +from langflow.schema.schema import Record class PGVectorComponent(CustomComponent): @@ -26,7 +27,7 @@ class PGVectorComponent(CustomComponent): """ return { "code": {"show": False}, - "documents": {"display_name": "Documents", "is_list": True}, + "inputs": {"display_name": "Input", "input_types": ["Document", "Record"]}, "embedding": {"display_name": "Embedding"}, "pg_server_url": { "display_name": "PostgreSQL Server Connection String", @@ -40,7 +41,7 @@ class PGVectorComponent(CustomComponent): embedding: Embeddings, pg_server_url: str, collection_name: str, - documents: Optional[Document] = None, + inputs: Optional[Record] = None, ) -> Union[VectorStore, BaseRetriever]: """ Builds the Vector Store or BaseRetriever object. @@ -55,6 +56,12 @@ class PGVectorComponent(CustomComponent): - VectorStore: The Vector Store object. 
""" + documents = [] + for _input in inputs or []: + if isinstance(_input, Record): + documents.append(_input.to_lc_document()) + else: + documents.append(_input) try: if documents is None: vector_store = PGVector.from_existing_index( diff --git a/src/backend/langflow/config.yaml b/src/backend/base/langflow/config.yaml similarity index 74% rename from src/backend/langflow/config.yaml rename to src/backend/base/langflow/config.yaml index 5ec4f3aaa..102cb9016 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/base/langflow/config.yaml @@ -11,31 +11,6 @@ agents: documentation: "" SQLAgent: documentation: "" -chains: - # LLMChain: - # documentation: "https://python.langchain.com/docs/modules/chains/foundational/llm_chain" - LLMMathChain: - documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_math" - LLMCheckerChain: - documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_checker" - # ConversationChain: - # documentation: "" - SeriesCharacterChain: - documentation: "" - MidJourneyPromptChain: - documentation: "" - TimeTravelGuideChain: - documentation: "" - SQLDatabaseChain: - documentation: "" - RetrievalQA: - documentation: "https://python.langchain.com/docs/modules/chains/popular/vector_db_qa" - RetrievalQAWithSourcesChain: - documentation: "" - ConversationalRetrievalChain: - documentation: "https://python.langchain.com/docs/modules/chains/popular/chat_vector_db" - CombineDocsChain: - documentation: "" documentloaders: AirbyteJSONLoader: documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json" @@ -109,32 +84,6 @@ embeddings: OllamaEmbeddings: documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/ollama" -llms: - OpenAI: - documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai" - ChatOpenAI: - documentation: 
"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai" - LlamaCpp: - documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp" - CTransformers: - documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers" - Cohere: - documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere" - Anthropic: - documentation: "" - ChatAnthropic: - documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic" - HuggingFaceHub: - documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub" - VertexAI: - documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/google_vertex_ai_palm" - ### - # There's a bug in this component deactivating until we get it sorted: _language_models.py", line 804, in send_message - # is_blocked=safety_attributes.get("blocked", False), - # AttributeError: 'list' object has no attribute 'get' - ChatVertexAI: - documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm" - ### memories: # https://github.com/supabase-community/supabase-py/issues/482 # ZepChatMessageHistory: @@ -269,24 +218,7 @@ retrievers: # https://github.com/supabase-community/supabase-py/issues/482 # ZepRetriever: # documentation: "https://python.langchain.com/docs/modules/data_connection/retrievers/integrations/zep_memorystore" -vectorstores: - # Chroma: - # documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma" - Qdrant: - documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant" - FAISS: - documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" - Pinecone: - documentation: 
"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/pinecone" - ElasticsearchStore: - documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/elasticsearch" - SupabaseVectorStore: - documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase" - MongoDBAtlasVectorSearch: - documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas" - # Requires docarray >=0.32.0 but langchain-serve requires jina 3.15.2 which doesn't support docarray >=0.32.0 - # DocArrayInMemorySearch: - # documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/docarray_in_memory" + wrappers: RequestsWrapper: documentation: "" diff --git a/src/backend/langflow/interface/wrappers/__init__.py b/src/backend/base/langflow/core/__init__.py similarity index 100% rename from src/backend/langflow/interface/wrappers/__init__.py rename to src/backend/base/langflow/core/__init__.py diff --git a/src/backend/langflow/core/celery_app.py b/src/backend/base/langflow/core/celery_app.py similarity index 100% rename from src/backend/langflow/core/celery_app.py rename to src/backend/base/langflow/core/celery_app.py diff --git a/src/backend/langflow/core/celeryconfig.py b/src/backend/base/langflow/core/celeryconfig.py similarity index 100% rename from src/backend/langflow/core/celeryconfig.py rename to src/backend/base/langflow/core/celeryconfig.py diff --git a/src/backend/langflow/components/__init__.py b/src/backend/base/langflow/custom.py similarity index 62% rename from src/backend/langflow/components/__init__.py rename to src/backend/base/langflow/custom.py index 765042210..ebe4c04e8 100644 --- a/src/backend/langflow/components/__init__.py +++ b/src/backend/base/langflow/custom.py @@ -1,4 +1 @@ -from langflow.interface.custom.custom_component import CustomComponent - - -__all__ = 
["CustomComponent"] +from langflow.interface.custom.custom_component import CustomComponent # noqa: F401 diff --git a/src/backend/langflow/field_typing/__init__.py b/src/backend/base/langflow/field_typing/__init__.py similarity index 97% rename from src/backend/langflow/field_typing/__init__.py rename to src/backend/base/langflow/field_typing/__init__.py index 46ea20111..15ce03693 100644 --- a/src/backend/langflow/field_typing/__init__.py +++ b/src/backend/base/langflow/field_typing/__init__.py @@ -13,6 +13,7 @@ from .constants import ( Callable, Chain, ChatPromptTemplate, + Code, Data, Document, Embeddings, @@ -20,6 +21,7 @@ from .constants import ( Object, Prompt, PromptTemplate, + Text, TextSplitter, Tool, VectorStore, @@ -64,6 +66,7 @@ __all__ = [ "TextSplitter", "Document", "AgentExecutor", + "Text", "Object", "Callable", "BasePromptTemplate", @@ -71,4 +74,5 @@ __all__ = [ "Prompt", "RangeSpec", "TemplateField", + "Code", ] diff --git a/src/backend/langflow/field_typing/constants.py b/src/backend/base/langflow/field_typing/constants.py similarity index 95% rename from src/backend/langflow/field_typing/constants.py rename to src/backend/base/langflow/field_typing/constants.py index 6680aceab..2e8fd4b3b 100644 --- a/src/backend/langflow/field_typing/constants.py +++ b/src/backend/base/langflow/field_typing/constants.py @@ -1,4 +1,4 @@ -from typing import Callable, Dict, Union +from typing import Callable, Dict, Text, Union from langchain.agents.agent import AgentExecutor from langchain.chains.base import Chain @@ -30,6 +30,10 @@ class Prompt: pass +class Code: + pass + + LANGCHAIN_BASE_TYPES = { "Chain": Chain, "AgentExecutor": AgentExecutor, @@ -54,6 +58,7 @@ CUSTOM_COMPONENT_SUPPORTED_TYPES = { **LANGCHAIN_BASE_TYPES, "NestedDict": NestedDict, "Data": Data, + "Text": Text, "Object": Object, "Callable": Callable, "Prompt": Prompt, diff --git a/src/backend/base/langflow/field_typing/range_spec.py b/src/backend/base/langflow/field_typing/range_spec.py new file 
mode 100644 index 000000000..78ed2e435 --- /dev/null +++ b/src/backend/base/langflow/field_typing/range_spec.py @@ -0,0 +1,30 @@ +from typing import Literal + +from pydantic import BaseModel, field_validator + + +class RangeSpec(BaseModel): + step_type: Literal["int", "float"] = "float" + min: float = -1.0 + max: float = 1.0 + step: float = 0.1 + + @field_validator("max") + @classmethod + def max_must_be_greater_than_min(cls, v, values, **kwargs): + if "min" in values.data and v <= values.data["min"]: + raise ValueError("Max must be greater than min") + return v + + @field_validator("step") + @classmethod + def step_must_be_positive(cls, v, values, **kwargs): + if v <= 0: + raise ValueError("Step must be positive") + if values.data["step_type"] == "int" and isinstance(v, float) and not v.is_integer(): + raise ValueError("When step_type is int, step must be an integer") + return v + + @classmethod + def set_step_type(cls, step_type: Literal["int", "float"], range_spec: "RangeSpec") -> "RangeSpec": + return cls(min=range_spec.min, max=range_spec.max, step=range_spec.step, step_type=step_type) diff --git a/src/backend/langflow/graph/__init__.py b/src/backend/base/langflow/graph/__init__.py similarity index 93% rename from src/backend/langflow/graph/__init__.py rename to src/backend/base/langflow/graph/__init__.py index e63b9dcc0..e80dcaa5f 100644 --- a/src/backend/langflow/graph/__init__.py +++ b/src/backend/base/langflow/graph/__init__.py @@ -4,17 +4,18 @@ from langflow.graph.vertex.base import Vertex from langflow.graph.vertex.types import ( AgentVertex, ChainVertex, + CustomComponentVertex, DocumentLoaderVertex, EmbeddingVertex, LLMVertex, MemoryVertex, PromptVertex, + RetrieverVertex, TextSplitterVertex, - ToolVertex, ToolkitVertex, + ToolVertex, VectorStoreVertex, WrapperVertex, - RetrieverVertex, ) __all__ = [ @@ -34,4 +35,5 @@ __all__ = [ "VectorStoreVertex", "WrapperVertex", "RetrieverVertex", + "CustomComponentVertex", ] diff --git 
a/src/backend/langflow/processing/__init__.py b/src/backend/base/langflow/graph/edge/__init__.py similarity index 100% rename from src/backend/langflow/processing/__init__.py rename to src/backend/base/langflow/graph/edge/__init__.py diff --git a/src/backend/langflow/graph/edge/base.py b/src/backend/base/langflow/graph/edge/base.py similarity index 54% rename from src/backend/langflow/graph/edge/base.py rename to src/backend/base/langflow/graph/edge/base.py index 399638e18..ba7fe05e8 100644 --- a/src/backend/langflow/graph/edge/base.py +++ b/src/backend/base/langflow/graph/edge/base.py @@ -1,8 +1,13 @@ -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Any, List, Optional from loguru import logger from pydantic import BaseModel, Field +from langflow.graph.edge.utils import build_clean_params +from langflow.schema.schema import INPUT_FIELD_NAME +from langflow.services.deps import get_monitor_service +from langflow.services.monitor.utils import log_message + if TYPE_CHECKING: from langflow.graph.vertex.base import Vertex @@ -93,5 +98,85 @@ class Edge: def __hash__(self) -> int: return hash(self.__repr__()) - def __eq__(self, __value: object) -> bool: - return self.__repr__() == __value.__repr__() if isinstance(__value, Edge) else False + def __eq__(self, __o: object) -> bool: + if not isinstance(__o, Edge): + return False + return ( + self._source_handle == __o._source_handle + and self._target_handle == __o._target_handle + and self.target_param == __o.target_param + ) + + +class ContractEdge(Edge): + def __init__(self, source: "Vertex", target: "Vertex", raw_edge: dict): + super().__init__(source, target, raw_edge) + self.is_fulfilled = False # Whether the contract has been fulfilled. + self.result: Any = None + + async def honor(self, source: "Vertex", target: "Vertex") -> None: + """ + Fulfills the contract by setting the result of the source vertex to the target vertex's parameter. 
+ If the edge is runnable, the source vertex is run with the message text and the target vertex's + root_field param is set to the + result. If the edge is not runnable, the target vertex's parameter is set to the result. + :param message: The message object to be processed if the edge is runnable. + """ + if self.is_fulfilled: + return + + if not source._built: + # The system should be read-only, so we should not be building vertices + # that are not already built. + raise ValueError(f"Source vertex {source.id} is not built.") + + if self.matched_type == "Text": + self.result = source._built_result + else: + self.result = source._built_object + + target.params[self.target_param] = self.result + self.is_fulfilled = True + + async def get_result_from_source(self, source: "Vertex", target: "Vertex"): + # Fulfill the contract if it has not been fulfilled. + if not self.is_fulfilled: + await self.honor(source, target) + + log_transaction(self, source, target, "success") + # If the target vertex is a power component we log messages + if target.vertex_type == "ChatOutput" and ( + isinstance(target.params.get(INPUT_FIELD_NAME), str) + or isinstance(target.params.get(INPUT_FIELD_NAME), dict) + ): + if target.params.get("message") == "": + return self.result + await log_message( + sender=target.params.get("sender", ""), + sender_name=target.params.get("sender_name", ""), + message=target.params.get(INPUT_FIELD_NAME, {}), + session_id=target.params.get("session_id", ""), + artifacts=target.artifacts, + ) + return self.result + + def __repr__(self) -> str: + return f"{self.source_id} -[{self.target_param}]-> {self.target_id}" + + +def log_transaction(edge: ContractEdge, source: "Vertex", target: "Vertex", status, error=None): + try: + monitor_service = get_monitor_service() + clean_params = build_clean_params(target) + data = { + "source": source.vertex_type, + "target": target.vertex_type, + "target_args": clean_params, + "timestamp": monitor_service.get_timestamp(), + 
"status": status, + "error": error, + } + monitor_service.add_row(table_name="transactions", data=data) + except Exception as e: + logger.error(f"Error logging transaction: {e}") + logger.error(f"Error logging transaction: {e}") diff --git a/src/backend/base/langflow/graph/edge/schema.py b/src/backend/base/langflow/graph/edge/schema.py new file mode 100644 index 000000000..ea691beae --- /dev/null +++ b/src/backend/base/langflow/graph/edge/schema.py @@ -0,0 +1,34 @@ +from typing import Any, List +from pydantic import BaseModel + + +class ResultPair(BaseModel): + result: Any + extra: Any + + +class Payload(BaseModel): + result_pairs: List[ResultPair] = [] + + def __iter__(self): + return iter(self.result_pairs) + + def add_result_pair(self, result: Any, extra: Any = None) -> None: + self.result_pairs.append(ResultPair(result=result, extra=extra)) + + def get_last_result_pair(self) -> ResultPair: + return self.result_pairs[-1] + + # format all but the last result pair + # into a string + def format(self, sep: str = "\n") -> str: + # Result: the result + # Extra: the extra if it exists don't show if it doesn't + return sep.join( + [ + f"Result: {result_pair.result}\nExtra: {result_pair.extra}" + if result_pair.extra is not None + else f"Result: {result_pair.result}" + for result_pair in self.result_pairs[:-1] + ] + ) diff --git a/src/backend/base/langflow/graph/edge/utils.py b/src/backend/base/langflow/graph/edge/utils.py new file mode 100644 index 000000000..0f69e4b2d --- /dev/null +++ b/src/backend/base/langflow/graph/edge/utils.py @@ -0,0 +1,19 @@ +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from langflow.graph.vertex.base import Vertex + + +def build_clean_params(target: "Vertex") -> dict: + """ + Cleans the parameters of the target vertex. + """ + # Removes all keys that the values aren't python types like str, int, bool, etc. 
+ params = { + key: value for key, value in target.params.items() if isinstance(value, (str, int, bool, float, list, dict)) + } + # if it is a list we need to check if the contents are python types + for key, value in params.items(): + if isinstance(value, list): + params[key] = [item for item in value if isinstance(item, (str, int, bool, float, list, dict))] + return params diff --git a/src/backend/langflow/services/auth/__init__.py b/src/backend/base/langflow/graph/graph/__init__.py similarity index 100% rename from src/backend/langflow/services/auth/__init__.py rename to src/backend/base/langflow/graph/graph/__init__.py diff --git a/src/backend/base/langflow/graph/graph/base.py b/src/backend/base/langflow/graph/graph/base.py new file mode 100644 index 000000000..8261d0b8c --- /dev/null +++ b/src/backend/base/langflow/graph/graph/base.py @@ -0,0 +1,1220 @@ +import asyncio +from collections import defaultdict, deque +from itertools import chain +from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Generator, List, Optional, Type, Union + +from loguru import logger + +from langflow.graph.edge.base import ContractEdge +from langflow.graph.graph.constants import lazy_load_vertex_dict +from langflow.graph.graph.runnable_vertices_manager import RunnableVerticesManager +from langflow.graph.graph.state_manager import GraphStateManager +from langflow.graph.graph.utils import process_flow +from langflow.graph.schema import InterfaceComponentTypes, RunOutputs +from langflow.graph.vertex.base import Vertex +from langflow.graph.vertex.types import ChatVertex, FileToolVertex, LLMVertex, RoutingVertex, StateVertex, ToolkitVertex +from langflow.interface.tools.constants import FILE_TOOLS +from langflow.schema import Record +from langflow.schema.schema import INPUT_FIELD_NAME, InputType + +if TYPE_CHECKING: + from langflow.graph.schema import ResultData + + +class Graph: + """A class representing a graph of vertices and edges.""" + + def __init__( + self, + nodes: 
List[Dict], + edges: List[Dict[str, str]], + flow_id: Optional[str] = None, + ) -> None: + """ + Initializes a new instance of the Graph class. + + Args: + nodes (List[Dict]): A list of dictionaries representing the vertices of the graph. + edges (List[Dict[str, str]]): A list of dictionaries representing the edges of the graph. + flow_id (Optional[str], optional): The ID of the flow. Defaults to None. + """ + self._vertices = nodes + self._edges = edges + self.raw_graph_data = {"nodes": nodes, "edges": edges} + self._runs = 0 + self._updates = 0 + self.flow_id = flow_id + self._is_input_vertices: List[str] = [] + self._is_output_vertices: List[str] = [] + self._is_state_vertices: List[str] = [] + self._has_session_id_vertices: List[str] = [] + self._sorted_vertices_layers: List[List[str]] = [] + self._run_id = "" + + self.top_level_vertices = [] + for vertex in self._vertices: + if vertex_id := vertex.get("id"): + self.top_level_vertices.append(vertex_id) + self._graph_data = process_flow(self.raw_graph_data) + + self._vertices = self._graph_data["nodes"] + self._edges = self._graph_data["edges"] + self.inactivated_vertices: set = set() + self.activated_vertices: List[str] = [] + self.vertices_layers: List[List[str]] = [] + self.vertices_to_run: set[str] = set() + self.stop_vertex: Optional[str] = None + + self.inactive_vertices: set = set() + self.edges: List[ContractEdge] = [] + self.vertices: List[Vertex] = [] + self.run_manager = RunnableVerticesManager() + self._build_graph() + self.build_graph_maps() + self.define_vertices_lists() + self.state_manager = GraphStateManager() + + def get_state(self, name: str) -> Optional[Record]: + """ + Returns the state of the graph with the given name. + + Args: + name (str): The name of the state. + + Returns: + Optional[Record]: The state record, or None if the state does not exist. 
+ """ + return self.state_manager.get_state(name, run_id=self._run_id) + + def update_state(self, name: str, record: Union[str, Record], caller: Optional[str] = None) -> None: + """ + Updates the state of the graph with the given name. + + Args: + name (str): The name of the state. + record (Union[str, Record]): The new state record. + caller (Optional[str], optional): The ID of the vertex that is updating the state. Defaults to None. + """ + if caller: + # If there is a caller which is a vertex_id, I want to activate + # all StateVertex in self.vertices that are not the caller + # essentially notifying all the other vertices that the state has changed + # This also has to activate their successors + self.activate_state_vertices(name, caller) + + self.state_manager.update_state(name, record, run_id=self._run_id) + + def activate_state_vertices(self, name: str, caller: str): + """ + Activates the state vertices in the graph with the given name and caller. + + Args: + name (str): The name of the state. + caller (str): The ID of the vertex that is updating the state. + """ + vertices_ids = [] + for vertex_id in self._is_state_vertices: + if vertex_id == caller: + continue + vertex = self.get_vertex(vertex_id) + if ( + isinstance(vertex._raw_params["name"], str) + and name in vertex._raw_params["name"] + and vertex_id != caller + and isinstance(vertex, StateVertex) + ): + vertices_ids.append(vertex_id) + successors = self.get_all_successors(vertex, flat=True) + self.vertices_to_run.update(list(map(lambda x: x.id, successors))) + self.activated_vertices = vertices_ids + self.vertices_to_run.update(vertices_ids) + + def reset_activated_vertices(self): + """ + Resets the activated vertices in the graph. + """ + self.activated_vertices = [] + + def append_state(self, name: str, record: Union[str, Record], caller: Optional[str] = None) -> None: + """ + Appends the state of the graph with the given name. + + Args: + name (str): The name of the state. 
+ record (Union[str, Record]): The state record to append. + caller (Optional[str], optional): The ID of the vertex that is updating the state. Defaults to None. + """ + if caller: + self.activate_state_vertices(name, caller) + + self.state_manager.append_state(name, record, run_id=self._run_id) + + @property + def run_id(self): + """ + The ID of the current run. + + Returns: + str: The run ID. + + Raises: + ValueError: If the run ID is not set. + """ + if not self._run_id: + raise ValueError("Run ID not set") + return self._run_id + + def set_run_id(self, run_id: str): + """ + Sets the ID of the current run. + + Args: + run_id (str): The run ID. + """ + for vertex in self.vertices: + self.state_manager.subscribe(run_id, vertex.update_graph_state) + self._run_id = run_id + + @property + def sorted_vertices_layers(self) -> List[List[str]]: + """ + The sorted layers of vertices in the graph. + + Returns: + List[List[str]]: The sorted layers of vertices. + """ + if not self._sorted_vertices_layers: + self.sort_vertices() + return self._sorted_vertices_layers + + def define_vertices_lists(self): + """ + Defines the lists of vertices that are inputs, outputs, and have session_id. + """ + attributes = ["is_input", "is_output", "has_session_id", "is_state"] + for vertex in self.vertices: + for attribute in attributes: + if getattr(vertex, attribute): + getattr(self, f"_{attribute}_vertices").append(vertex.id) + + async def _run( + self, + inputs: Dict[str, str], + input_components: list[str], + input_type: InputType | None, + outputs: list[str], + stream: bool, + session_id: str, + ) -> List[Optional["ResultData"]]: + """ + Runs the graph with the given inputs. + + Args: + inputs (Dict[str, str]): The input values for the graph. + input_components (list[str]): The components to run for the inputs. + outputs (list[str]): The outputs to retrieve from the graph. + stream (bool): Whether to stream the results or not. + session_id (str): The session ID for the graph. 
+ + Returns: + List[Optional["ResultData"]]: The outputs of the graph. + """ + if input_components and not isinstance(input_components, list): + raise ValueError(f"Invalid components value: {input_components}. Expected list") + elif input_components is None: + input_components = [] + + if not isinstance(inputs.get(INPUT_FIELD_NAME, ""), str): + raise ValueError(f"Invalid input value: {inputs.get(INPUT_FIELD_NAME)}. Expected string") + if inputs: + for vertex_id in self._is_input_vertices: + vertex = self.get_vertex(vertex_id) + # If the vertex is not in the input_components list + if input_components and ( + vertex_id not in input_components or vertex.display_name not in input_components + ): + continue + # If the input_type is not any and the input_type is not in the vertex id + # Example: input_type = "chat" and vertex.id = "OpenAI-19ddn" + elif input_type is not None and input_type != "any" and input_type not in vertex.id.lower(): + continue + if vertex is None: + raise ValueError(f"Vertex {vertex_id} not found") + vertex.update_raw_params(inputs, overwrite=True) + # Update all the vertices with the session_id + for vertex_id in self._has_session_id_vertices: + vertex = self.get_vertex(vertex_id) + if vertex is None: + raise ValueError(f"Vertex {vertex_id} not found") + vertex.update_raw_params({"session_id": session_id}) + # Process the graph + try: + await self.process() + self.increment_run_count() + except Exception as exc: + logger.exception(exc) + raise ValueError(f"Error running graph: {exc}") from exc + # Get the outputs + vertex_outputs = [] + for vertex in self.vertices: + if vertex is None: + raise ValueError(f"Vertex {vertex_id} not found") + + if not vertex.result and not stream and hasattr(vertex, "consume_async_generator"): + await vertex.consume_async_generator() + if (not outputs and vertex.is_output) or (vertex.display_name in outputs or vertex.id in outputs): + vertex_outputs.append(vertex.result) + + return vertex_outputs + + def run( + self, 
+ inputs: list[Dict[str, str]], + input_components: Optional[list[list[str]]] = None, + types: Optional[list[InputType | None]] = None, + outputs: Optional[list[str]] = None, + session_id: Optional[str] = None, + stream: bool = False, + ) -> List[RunOutputs]: + """ + Run the graph with the given inputs and return the outputs. + + Args: + inputs (Dict[str, str]): A dictionary of input values. + input_components (Optional[list[str]]): A list of input components. + types (Optional[list[str]]): A list of types. + outputs (Optional[list[str]]): A list of output components. + session_id (Optional[str]): The session ID. + stream (bool): Whether to stream the outputs. + + Returns: + List[RunOutputs]: A list of RunOutputs objects representing the outputs. + """ + # run the async function in a sync way + # this could be used in a FastAPI endpoint + # so we should take care of the event loop + loop = asyncio.get_event_loop() + return loop.run_until_complete( + self.arun( + inputs=inputs, + inputs_components=input_components, + types=types, + outputs=outputs, + session_id=session_id, + stream=stream, + ) + ) + + async def arun( + self, + inputs: list[Dict[str, str]], + inputs_components: Optional[list[list[str]]] = None, + types: Optional[list[InputType | None]] = None, + outputs: Optional[list[str]] = None, + session_id: Optional[str] = None, + stream: bool = False, + ) -> List[RunOutputs]: + """ + Runs the graph with the given inputs. + + Args: + inputs (list[Dict[str, str]]): The input values for the graph. + inputs_components (Optional[list[list[str]]], optional): The components to run for the inputs. Defaults to None. + outputs (Optional[list[str]], optional): The outputs to retrieve from the graph. Defaults to None. + session_id (Optional[str], optional): The session ID for the graph. Defaults to None. + stream (bool, optional): Whether to stream the results or not. Defaults to False. + + Returns: + List[RunOutputs]: The outputs of the graph. 
+ """ + # inputs is {"message": "Hello, world!"} + # we need to go through self.inputs and update the self._raw_params + # of the vertices that are inputs + # if the value is a list, we need to run multiple times + vertex_outputs = [] + if not isinstance(inputs, list): + inputs = [inputs] + elif not inputs: + inputs = [{}] + # Length of all should be the as inputs length + # just add empty lists to complete the length + if inputs_components is None: + inputs_components = [] + for _ in range(len(inputs) - len(inputs_components)): + inputs_components.append([]) + if types is None: + types = [] + for _ in range(len(inputs) - len(types)): + types.append("any") + for run_inputs, components, input_type in zip(inputs, inputs_components, types): + run_outputs = await self._run( + inputs=run_inputs, + input_components=components, + input_type=input_type, + outputs=outputs or [], + stream=stream, + session_id=session_id or "", + ) + run_output_object = RunOutputs(inputs=run_inputs, outputs=run_outputs) + logger.debug(f"Run outputs: {run_output_object}") + vertex_outputs.append(run_output_object) + return vertex_outputs + + def next_vertex_to_build(self): + """ + Returns the next vertex to be built. + + Yields: + str: The ID of the next vertex to be built. + """ + yield from chain.from_iterable(self.vertices_layers) + + @property + def metadata(self): + """ + The metadata of the graph. + + Returns: + dict: The metadata of the graph. + """ + return { + "runs": self._runs, + "updates": self._updates, + "inactivated_vertices": self.inactivated_vertices, + } + + def build_graph_maps(self): + """ + Builds the adjacency maps for the graph. + """ + self.predecessor_map, self.successor_map = self.build_adjacency_maps() + + self.in_degree_map = self.build_in_degree() + self.parent_child_map = self.build_parent_child_map() + + def reset_inactivated_vertices(self): + """ + Resets the inactivated vertices in the graph. 
+ """ + self.inactivated_vertices = [] + self.inactivated_vertices = set() + + def mark_all_vertices(self, state: str): + """Marks all vertices in the graph.""" + for vertex in self.vertices: + vertex.set_state(state) + + def mark_vertex(self, vertex_id: str, state: str): + """Marks a vertex in the graph.""" + vertex = self.get_vertex(vertex_id) + vertex.set_state(state) + + def mark_branch(self, vertex_id: str, state: str): + """Marks a branch of the graph.""" + self.mark_vertex(vertex_id, state) + for child_id in self.parent_child_map[vertex_id]: + self.mark_branch(child_id, state) + + def build_parent_child_map(self): + parent_child_map = defaultdict(list) + for vertex in self.vertices: + parent_child_map[vertex.id] = [child.id for child in self.get_successors(vertex)] + return parent_child_map + + def increment_run_count(self): + self._runs += 1 + + def increment_update_count(self): + self._updates += 1 + + def __getstate__(self): + return self.raw_graph_data + + def __setstate__(self, state): + self.__init__(**state) + + @classmethod + def from_payload(cls, payload: Dict, flow_id: Optional[str] = None) -> "Graph": + """ + Creates a graph from a payload. + + Args: + payload (Dict): The payload to create the graph from.˜` + + Returns: + Graph: The created graph. + """ + if "data" in payload: + payload = payload["data"] + try: + vertices = payload["nodes"] + edges = payload["edges"] + return cls(vertices, edges, flow_id) + except KeyError as exc: + logger.exception(exc) + if "nodes" not in payload and "edges" not in payload: + logger.exception(exc) + raise ValueError( + f"Invalid payload. Expected keys 'nodes' and 'edges'. 
Found {list(payload.keys())}" + ) from exc + raise ValueError(f"Error while creating graph from payload: {exc}") from exc + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Graph): + return False + return self.__repr__() == other.__repr__() + + # update this graph with another graph by comparing the __repr__ of each vertex + # and if the __repr__ of a vertex is not the same as the other + # then update the .data of the vertex to the self + # both graphs have the same vertices and edges + # but the data of the vertices might be different + + def update_edges_from_vertex(self, vertex: Vertex, other_vertex: Vertex) -> None: + """Updates the edges of a vertex in the Graph.""" + new_edges = [] + for edge in self.edges: + if edge.source_id == other_vertex.id or edge.target_id == other_vertex.id: + continue + new_edges.append(edge) + new_edges += other_vertex.edges + self.edges = new_edges + + def vertex_data_is_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: + data_is_equivalent = vertex == other_vertex + if not data_is_equivalent: + return False + return self.vertex_edges_are_identical(vertex, other_vertex) + + def vertex_edges_are_identical(self, vertex: Vertex, other_vertex: Vertex) -> bool: + same_length = len(vertex.edges) == len(other_vertex.edges) + if not same_length: + return False + for edge in vertex.edges: + if edge not in other_vertex.edges: + return False + return True + + def update(self, other: "Graph") -> "Graph": + # Existing vertices in self graph + existing_vertex_ids = set(vertex.id for vertex in self.vertices) + # Vertex IDs in the other graph + other_vertex_ids = set(other.vertex_map.keys()) + + # Find vertices that are in other but not in self (new vertices) + new_vertex_ids = other_vertex_ids - existing_vertex_ids + + # Find vertices that are in self but not in other (removed vertices) + removed_vertex_ids = existing_vertex_ids - other_vertex_ids + + # Remove vertices that are not in the other graph + for 
vertex_id in removed_vertex_ids: + try: + self.remove_vertex(vertex_id) + except ValueError: + pass + + # The order here matters because adding the vertex is required + # if any of them have edges that point to any of the new vertices + # By adding them first, them adding the edges we ensure that the + # edges have valid vertices to point to + + # Add new vertices + for vertex_id in new_vertex_ids: + new_vertex = other.get_vertex(vertex_id) + self._add_vertex(new_vertex) + + # Now update the edges + for vertex_id in new_vertex_ids: + new_vertex = other.get_vertex(vertex_id) + self._update_edges(new_vertex) + # Graph is set at the end because the edges come from the graph + # and the other graph is where the new edges and vertices come from + new_vertex.graph = self + + # Update existing vertices that have changed + for vertex_id in existing_vertex_ids.intersection(other_vertex_ids): + self_vertex = self.get_vertex(vertex_id) + other_vertex = other.get_vertex(vertex_id) + # If the vertices are not identical, update the vertex + if not self.vertex_data_is_identical(self_vertex, other_vertex): + self.update_vertex_from_another(self_vertex, other_vertex) + + self.build_graph_maps() + self.increment_update_count() + return self + + def update_vertex_from_another(self, vertex: Vertex, other_vertex: Vertex) -> None: + """ + Updates a vertex from another vertex. + + Args: + vertex (Vertex): The vertex to be updated. + other_vertex (Vertex): The vertex to update from. 
+ """ + vertex._data = other_vertex._data + vertex._parse_data() + # Now we update the edges of the vertex + self.update_edges_from_vertex(vertex, other_vertex) + vertex.params = {} + vertex._build_params() + vertex.graph = self + # If the vertex is frozen, we don't want + # to reset the results nor the _built attribute + if not vertex.frozen: + vertex._built = False + vertex.result = None + vertex.artifacts = {} + vertex.set_top_level(self.top_level_vertices) + self.reset_all_edges_of_vertex(vertex) + + def reset_all_edges_of_vertex(self, vertex: Vertex) -> None: + """Resets all the edges of a vertex.""" + for edge in vertex.edges: + for vid in [edge.source_id, edge.target_id]: + if vid in self.vertex_map: + _vertex = self.vertex_map[vid] + if not _vertex.frozen: + _vertex._build_params() + + def _add_vertex(self, vertex: Vertex) -> None: + """Adds a vertex to the graph.""" + self.vertices.append(vertex) + self.vertex_map[vertex.id] = vertex + + def add_vertex(self, vertex: Vertex) -> None: + """Adds a new vertex to the graph.""" + self._add_vertex(vertex) + self._update_edges(vertex) + + def _update_edges(self, vertex: Vertex) -> None: + """Updates the edges of a vertex.""" + # Vertex has edges, so we need to update the edges + for edge in vertex.edges: + if edge not in self.edges and edge.source_id in self.vertex_map and edge.target_id in self.vertex_map: + self.edges.append(edge) + + def _build_graph(self) -> None: + """Builds the graph from the vertices and edges.""" + self.vertices = self._build_vertices() + self.vertex_map = {vertex.id: vertex for vertex in self.vertices} + self.edges = self._build_edges() + + # This is a hack to make sure that the LLM vertex is sent to + # the toolkit vertex + self._build_vertex_params() + + # Now that we have the vertices and edges + # We need to map the vertices that are connected to + # to ChatVertex instances + + def remove_vertex(self, vertex_id: str) -> None: + """Removes a vertex from the graph.""" + vertex = 
self.get_vertex(vertex_id) + if vertex is None: + return + self.vertices.remove(vertex) + self.vertex_map.pop(vertex_id) + self.edges = [edge for edge in self.edges if edge.source_id != vertex_id and edge.target_id != vertex_id] + + def _build_vertex_params(self) -> None: + """Identifies and handles the LLM vertex within the graph.""" + llm_vertex = None + for vertex in self.vertices: + vertex._build_params() + if isinstance(vertex, LLMVertex): + llm_vertex = vertex + + if llm_vertex: + for vertex in self.vertices: + if isinstance(vertex, ToolkitVertex): + vertex.params["llm"] = llm_vertex + + def _validate_vertex(self, vertex: Vertex) -> bool: + """Validates a vertex.""" + # All vertices that do not have edges are invalid + return len(self.get_vertex_edges(vertex.id)) > 0 + + def get_vertex(self, vertex_id: str) -> Vertex: + """Returns a vertex by id.""" + try: + return self.vertex_map[vertex_id] + except KeyError: + raise ValueError(f"Vertex {vertex_id} not found") + + async def build_vertex( + self, + lock: asyncio.Lock, + set_cache_coro: Callable[["Graph", asyncio.Lock], Coroutine], + vertex_id: str, + inputs_dict: Optional[Dict[str, str]] = None, + user_id: Optional[str] = None, + ): + """ + Builds a vertex in the graph. + + Args: + lock (asyncio.Lock): A lock to synchronize access to the graph. + set_cache_coro (Coroutine): A coroutine to set the cache. + vertex_id (str): The ID of the vertex to build. + inputs (Optional[Dict[str, str]]): Optional dictionary of inputs for the vertex. Defaults to None. + user_id (Optional[str]): Optional user ID. Defaults to None. + + Returns: + Tuple: A tuple containing the next runnable vertices, top level vertices, result dictionary, + parameters, validity flag, artifacts, and the built vertex. + + Raises: + ValueError: If no result is found for the vertex. 
+ """ + vertex = self.get_vertex(vertex_id) + try: + if not vertex.frozen or not vertex._built: + await vertex.build(user_id=user_id, inputs=inputs_dict) + + if vertex.result is not None: + params = vertex._built_object_repr() + valid = True + result_dict = vertex.result + artifacts = vertex.artifacts + else: + raise ValueError(f"No result found for vertex {vertex_id}") + + next_runnable_vertices, top_level_vertices = await self.get_next_and_top_level_vertices( + lock, set_cache_coro, vertex + ) + return next_runnable_vertices, top_level_vertices, result_dict, params, valid, artifacts, vertex + except Exception as exc: + logger.exception(f"Error building vertex: {exc}") + raise exc + + async def get_next_and_top_level_vertices( + self, lock: asyncio.Lock, set_cache_coro: Callable[["Graph", asyncio.Lock], Coroutine], vertex: Vertex + ): + """ + Retrieves the next runnable vertices and the top level vertices for a given vertex. + + Args: + lock (asyncio.Lock): The lock used to synchronize access to the graph. + set_cache_coro (Coroutine): The coroutine used to set the cache for the graph. + vertex (Vertex): The vertex for which to retrieve the next runnable and top level vertices. + + Returns: + Tuple[List[Vertex], List[Vertex]]: A tuple containing the next runnable vertices and the top level vertices. 
+ """ + next_runnable_vertices = await self.run_manager.get_next_runnable_vertices(lock, set_cache_coro, self, vertex) + top_level_vertices = self.run_manager.get_top_level_vertices(self, next_runnable_vertices) + return next_runnable_vertices, top_level_vertices + + def get_vertex_edges( + self, + vertex_id: str, + is_target: Optional[bool] = None, + is_source: Optional[bool] = None, + ) -> List[ContractEdge]: + """Returns a list of edges for a given vertex.""" + # The idea here is to return the edges that have the vertex_id as source or target + # or both + return [ + edge + for edge in self.edges + if (edge.source_id == vertex_id and is_source is not False) + or (edge.target_id == vertex_id and is_target is not False) + ] + + def get_vertices_with_target(self, vertex_id: str) -> List[Vertex]: + """Returns the vertices connected to a vertex.""" + vertices: List[Vertex] = [] + for edge in self.edges: + if edge.target_id == vertex_id: + vertex = self.get_vertex(edge.source_id) + if vertex is None: + continue + vertices.append(vertex) + return vertices + + async def process(self) -> "Graph": + """Processes the graph with vertices in each layer run in parallel.""" + vertices_layers = self.sorted_vertices_layers + vertex_task_run_count: Dict[str, int] = {} + for layer_index, layer in enumerate(vertices_layers): + tasks = [] + for vertex_id in layer: + vertex = self.get_vertex(vertex_id) + task = asyncio.create_task( + vertex.build(), + name=f"{vertex.display_name} Run {vertex_task_run_count.get(vertex_id, 0)}", + ) + tasks.append(task) + vertex_task_run_count[vertex_id] = vertex_task_run_count.get(vertex_id, 0) + 1 + logger.debug(f"Running layer {layer_index} with {len(tasks)} tasks") + await self._execute_tasks(tasks) + logger.debug("Graph processing complete") + return self + + async def _execute_tasks(self, tasks): + """Executes tasks in parallel, handling exceptions for each task.""" + results = [] + for i, task in enumerate(asyncio.as_completed(tasks)): + try: + 
result = await task + results.append(result) + except Exception as e: + # Log the exception along with the task name for easier debugging + # task_name = task.get_name() + # coroutine has not attribute get_name + task_name = tasks[i].get_name() + logger.error(f"Task {task_name} failed with exception: {e}") + # Cancel all remaining tasks + for t in tasks[i:]: + t.cancel() + raise e + return results + + def topological_sort(self) -> List[Vertex]: + """ + Performs a topological sort of the vertices in the graph. + + Returns: + List[Vertex]: A list of vertices in topological order. + + Raises: + ValueError: If the graph contains a cycle. + """ + # States: 0 = unvisited, 1 = visiting, 2 = visited + state = {vertex: 0 for vertex in self.vertices} + sorted_vertices = [] + + def dfs(vertex): + if state[vertex] == 1: + # We have a cycle + raise ValueError("Graph contains a cycle, cannot perform topological sort") + if state[vertex] == 0: + state[vertex] = 1 + for edge in vertex.edges: + if edge.source_id == vertex.id: + dfs(self.get_vertex(edge.target_id)) + state[vertex] = 2 + sorted_vertices.append(vertex) + + # Visit each vertex + for vertex in self.vertices: + if state[vertex] == 0: + dfs(vertex) + + return list(reversed(sorted_vertices)) + + def generator_build(self) -> Generator[Vertex, None, None]: + """Builds each vertex in the graph and yields it.""" + sorted_vertices = self.topological_sort() + logger.debug("There are %s vertices in the graph", len(sorted_vertices)) + yield from sorted_vertices + + def get_predecessors(self, vertex): + """Returns the predecessors of a vertex.""" + return [self.get_vertex(source_id) for source_id in self.predecessor_map.get(vertex.id, [])] + + def get_all_successors(self, vertex, recursive=True, flat=True): + # Recursively get the successors of the current vertex + # successors = vertex.successors + # if not successors: + # return [] + # successors_result = [] + # for successor in successors: + # # Just return a list of successors 
+ # if recursive: + # next_successors = self.get_all_successors(successor) + # successors_result.extend(next_successors) + # successors_result.append(successor) + # return successors_result + # The above is the version without the flat parameter + # The below is the version with the flat parameter + # the flat parameter will define if each layer of successors + # becomes one list or if the result is a list of lists + # if flat is True, the result will be a list of vertices + # if flat is False, the result will be a list of lists of vertices + # each list will represent a layer of successors + successors = vertex.successors + if not successors: + return [] + successors_result = [] + for successor in successors: + if recursive: + next_successors = self.get_all_successors(successor) + if flat: + successors_result.extend(next_successors) + else: + successors_result.append(next_successors) + if flat: + successors_result.append(successor) + else: + successors_result.append([successor]) + return successors_result + + def get_successors(self, vertex): + """Returns the successors of a vertex.""" + return [self.get_vertex(target_id) for target_id in self.successor_map.get(vertex.id, [])] + + def get_vertex_neighbors(self, vertex: Vertex) -> Dict[Vertex, int]: + """Returns the neighbors of a vertex.""" + neighbors: Dict[Vertex, int] = {} + for edge in self.edges: + if edge.source_id == vertex.id: + neighbor = self.get_vertex(edge.target_id) + if neighbor is None: + continue + if neighbor not in neighbors: + neighbors[neighbor] = 0 + neighbors[neighbor] += 1 + elif edge.target_id == vertex.id: + neighbor = self.get_vertex(edge.source_id) + if neighbor is None: + continue + if neighbor not in neighbors: + neighbors[neighbor] = 0 + neighbors[neighbor] += 1 + return neighbors + + def _build_edges(self) -> List[ContractEdge]: + """Builds the edges of the graph.""" + # Edge takes two vertices as arguments, so we need to build the vertices first + # and then build the edges + # if 
we can't find a vertex, we raise an error + + edges: set[ContractEdge] = set() + for edge in self._edges: + source = self.get_vertex(edge["source"]) + target = self.get_vertex(edge["target"]) + + if source is None: + raise ValueError(f"Source vertex {edge['source']} not found") + if target is None: + raise ValueError(f"Target vertex {edge['target']} not found") + new_edge = ContractEdge(source, target, edge) + + edges.add(new_edge) + + return list(edges) + + def _get_vertex_class(self, node_type: str, node_base_type: str, node_id: str) -> Type[Vertex]: + """Returns the node class based on the node type.""" + # First we check for the node_base_type + node_name = node_id.split("-")[0] + if node_name in ["ChatOutput", "ChatInput"]: + return ChatVertex + elif node_name in ["ShouldRunNext"]: + return RoutingVertex + elif node_name in ["SharedState", "Notify", "GetNotified"]: + return StateVertex + elif node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: + return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type] + elif node_name in lazy_load_vertex_dict.VERTEX_TYPE_MAP: + return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_name] + + if node_type in FILE_TOOLS: + return FileToolVertex + if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: + return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type] + return ( + lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_base_type] + if node_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP + else Vertex + ) + + def _build_vertices(self) -> List[Vertex]: + """Builds the vertices of the graph.""" + vertices: List[Vertex] = [] + for vertex in self._vertices: + vertex_data = vertex["data"] + vertex_type: str = vertex_data["type"] # type: ignore + vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore + if "id" not in vertex_data: + raise ValueError(f"Vertex data for {vertex_data['display_name']} does not contain an id") + + VertexClass = self._get_vertex_class(vertex_type, vertex_base_type, vertex_data["id"]) + 
+ vertex_instance = VertexClass(vertex, graph=self) + vertex_instance.set_top_level(self.top_level_vertices) + vertices.append(vertex_instance) + + return vertices + + def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> List[Vertex]: + """Returns the children of a vertex based on the vertex type.""" + children = [] + vertex_types = [vertex.data["type"]] + if "node" in vertex.data: + vertex_types += vertex.data["node"]["base_classes"] + if vertex_type in vertex_types: + children.append(vertex) + return children + + def __repr__(self): + vertex_ids = [vertex.id for vertex in self.vertices] + edges_repr = "\n".join([f"{edge.source_id} --> {edge.target_id}" for edge in self.edges]) + return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}" + + def sort_up_to_vertex(self, vertex_id: str, is_start: bool = False) -> List[Vertex]: + """Cuts the graph up to a given vertex and sorts the resulting subgraph.""" + # Initial setup + visited = set() # To keep track of visited vertices + excluded = set() # To keep track of vertices that should be excluded + stack = [vertex_id] # Use a list as a stack for DFS + + def get_successors(vertex, recursive=True): + # Recursively get the successors of the current vertex + successors = vertex.successors + if not successors: + return [] + successors_result = [] + for successor in successors: + # Just return a list of successors + if recursive: + next_successors = get_successors(successor) + successors_result.extend(next_successors) + successors_result.append(successor) + return successors_result + + stop_or_start_vertex = self.get_vertex(vertex_id) + stop_predecessors = [pre.id for pre in stop_or_start_vertex.predecessors] + # DFS to collect all vertices that can reach the specified vertex + while stack: + current_id = stack.pop() + if current_id not in visited and current_id not in excluded: + visited.add(current_id) + current_vertex = self.get_vertex(current_id) + # Assuming get_predecessors is a method that 
returns all vertices with edges to current_vertex + for predecessor in current_vertex.predecessors: + stack.append(predecessor.id) + + if current_id == vertex_id: + # We should add to visited all the vertices that are successors of the current vertex + # and their successors and so on + # if the vertex is a start, it means we are starting from the beginning + # and getting successors + for successor in current_vertex.successors: + if is_start: + stack.append(successor.id) + else: + excluded.add(successor.id) + all_successors = get_successors(successor) + for successor in all_successors: + if is_start: + stack.append(successor.id) + else: + excluded.add(successor.id) + elif current_id not in stop_predecessors: + # If the current vertex is not the target vertex, we should add all its successors + # to the stack if they are not in visited + for successor in current_vertex.successors: + if successor.id not in visited: + stack.append(successor.id) + + # Filter the original graph's vertices and edges to keep only those in `visited` + vertices_to_keep = [self.get_vertex(vid) for vid in visited] + + return vertices_to_keep + + def layered_topological_sort( + self, + vertices: List[Vertex], + filter_graphs: bool = False, + ) -> List[List[str]]: + """Performs a layered topological sort of the vertices in the graph.""" + vertices_ids = {vertex.id for vertex in vertices} + # Queue for vertices with no incoming edges + queue = deque( + vertex.id + for vertex in vertices + # if filter_graphs then only vertex.is_input will be considered + if self.in_degree_map[vertex.id] == 0 and (not filter_graphs or vertex.is_input) + ) + layers: List[List[str]] = [] + visited = set(queue) + current_layer = 0 + while queue: + layers.append([]) # Start a new layer + layer_size = len(queue) + for _ in range(layer_size): + vertex_id = queue.popleft() + visited.add(vertex_id) + + layers[current_layer].append(vertex_id) + for neighbor in self.successor_map[vertex_id]: + # only vertices in 
`vertices_ids` should be considered + # because vertices by have been filtered out + # in a previous step. All dependencies of theirs + # will be built automatically if required + if neighbor not in vertices_ids: + continue + + self.in_degree_map[neighbor] -= 1 # 'remove' edge + if self.in_degree_map[neighbor] == 0 and neighbor not in visited: + queue.append(neighbor) + + # if > 0 it might mean not all predecessors have added to the queue + # so we should process the neighbors predecessors + elif self.in_degree_map[neighbor] > 0: + for predecessor in self.predecessor_map[neighbor]: + if predecessor not in queue and predecessor not in visited: + queue.append(predecessor) + + current_layer += 1 # Next layer + new_layers = self.refine_layers(layers) + return new_layers + + def refine_layers(self, initial_layers): + # Map each vertex to its current layer + vertex_to_layer = {} + for layer_index, layer in enumerate(initial_layers): + for vertex in layer: + vertex_to_layer[vertex] = layer_index + + # Build the adjacency list for reverse lookup (dependencies) + + refined_layers = [[] for _ in initial_layers] # Start with empty layers + new_layer_index_map = defaultdict(int) + + # Map each vertex to its new layer index + # by finding the lowest layer index of its dependencies + # and subtracting 1 + # If a vertex has no dependencies, it will be placed in the first layer + # If a vertex has dependencies, it will be placed in the lowest layer index of its dependencies + # minus 1 + for vertex_id, deps in self.successor_map.items(): + indexes = [vertex_to_layer[dep] for dep in deps if dep in vertex_to_layer] + new_layer_index = max(min(indexes, default=0) - 1, 0) + new_layer_index_map[vertex_id] = new_layer_index + + for layer_index, layer in enumerate(initial_layers): + for vertex_id in layer: + # Place the vertex in the highest possible layer where its dependencies are met + new_layer_index = new_layer_index_map[vertex_id] + if new_layer_index > layer_index: + 
refined_layers[new_layer_index].append(vertex_id) + vertex_to_layer[vertex_id] = new_layer_index + else: + refined_layers[layer_index].append(vertex_id) + + # Remove empty layers if any + refined_layers = [layer for layer in refined_layers if layer] + + return refined_layers + + def sort_chat_inputs_first(self, vertices_layers: List[List[str]]) -> List[List[str]]: + chat_inputs_first = [] + for layer in vertices_layers: + for vertex_id in layer: + if "ChatInput" in vertex_id: + # Remove the ChatInput from the layer + layer.remove(vertex_id) + chat_inputs_first.append(vertex_id) + if not chat_inputs_first: + return vertices_layers + + vertices_layers = [chat_inputs_first] + vertices_layers + + return vertices_layers + + def sort_vertices( + self, + stop_component_id: Optional[str] = None, + start_component_id: Optional[str] = None, + ) -> List[str]: + """Sorts the vertices in the graph.""" + self.mark_all_vertices("ACTIVE") + if stop_component_id is not None: + self.stop_vertex = stop_component_id + vertices = self.sort_up_to_vertex(stop_component_id) + elif start_component_id: + vertices = self.sort_up_to_vertex(start_component_id, is_start=True) + else: + vertices = self.vertices + # without component_id we are probably running in the chat + # so we want to pick only graphs that start with ChatInput or + # TextInput + + vertices_layers = self.layered_topological_sort(vertices) + vertices_layers = self.sort_by_avg_build_time(vertices_layers) + # vertices_layers = self.sort_chat_inputs_first(vertices_layers) + self.increment_run_count() + self._sorted_vertices_layers = vertices_layers + first_layer = vertices_layers[0] + # save the only the rest + self.vertices_layers = vertices_layers[1:] + self.vertices_to_run = {vertex_id for vertex_id in chain.from_iterable(vertices_layers)} + self.build_run_map() + # Return just the first layer + return first_layer + + def sort_interface_components_first(self, vertices_layers: List[List[str]]) -> List[List[str]]: + """Sorts the 
vertices in the graph so that vertices containing ChatInput or ChatOutput come first.""" + + def contains_interface_component(vertex): + return any(component.value in vertex for component in InterfaceComponentTypes) + + # Sort each inner list so that vertices containing ChatInput or ChatOutput come first + sorted_vertices = [ + sorted( + inner_list, + key=lambda vertex: not contains_interface_component(vertex), + ) + for inner_list in vertices_layers + ] + return sorted_vertices + + def sort_by_avg_build_time(self, vertices_layers: List[List[str]]) -> List[List[str]]: + """Sorts the vertices in the graph so that vertices with the lowest average build time come first.""" + + def sort_layer_by_avg_build_time(vertices_ids: List[str]) -> List[str]: + """Sorts the vertices in the graph so that vertices with the lowest average build time come first.""" + if len(vertices_ids) == 1: + return vertices_ids + vertices_ids.sort(key=lambda vertex_id: self.get_vertex(vertex_id).avg_build_time) + + return vertices_ids + + sorted_vertices = [sort_layer_by_avg_build_time(layer) for layer in vertices_layers] + return sorted_vertices + + def is_vertex_runnable(self, vertex_id: str) -> bool: + """Returns whether a vertex is runnable.""" + return self.run_manager.is_vertex_runnable(vertex_id) + + def build_run_map(self): + """ + Builds the run map for the graph. + + This method is responsible for building the run map for the graph, + which maps each node in the graph to its corresponding run function. + + Returns: + None + """ + self.run_manager.build_run_map(self) + + def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]: + """ + For each successor of the current vertex, find runnable predecessors if any. + This checks the direct predecessors of each successor to identify any that are + immediately runnable, expanding the search to ensure progress can be made. 
+ """ + return self.run_manager.find_runnable_predecessors_for_successors(vertex_id) + + def remove_from_predecessors(self, vertex_id: str): + self.run_manager.remove_from_predecessors(vertex_id) + + def build_in_degree(self): + in_degree = defaultdict(int) + for edge in self.edges: + in_degree[edge.target_id] += 1 + return in_degree + + def build_adjacency_maps(self): + """Returns the adjacency maps for the graph.""" + predecessor_map = defaultdict(list) + successor_map = defaultdict(list) + for edge in self.edges: + predecessor_map[edge.target_id].append(edge.source_id) + successor_map[edge.source_id].append(edge.target_id) + return predecessor_map, successor_map diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/base/langflow/graph/graph/constants.py similarity index 89% rename from src/backend/langflow/graph/graph/constants.py rename to src/backend/base/langflow/graph/graph/constants.py index 928280743..658c3c68a 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ b/src/backend/base/langflow/graph/graph/constants.py @@ -1,6 +1,5 @@ from langflow.graph.vertex import types from langflow.interface.agents.base import agent_creator -from langflow.interface.chains.base import chain_creator from langflow.interface.custom.base import custom_component_creator from langflow.interface.document_loaders.base import documentloader_creator from langflow.interface.embeddings.base import embedding_creator @@ -15,6 +14,9 @@ from langflow.interface.tools.base import tool_creator from langflow.interface.wrappers.base import wrapper_creator from langflow.utils.lazy_load import LazyLoadDictBase +CHAT_COMPONENTS = ["ChatInput", "ChatOutput", "TextInput", "SessionID"] +ROUTING_COMPONENTS = ["ShouldRunNext"] + class VertexTypesDict(LazyLoadDictBase): def __init__(self): @@ -31,14 +33,11 @@ class VertexTypesDict(LazyLoadDictBase): "Custom": ["Custom Tool", "Python Function"], } - def get_custom_component_vertex_type(self): - return 
types.CustomComponentVertex - def get_type_dict(self): return { **{t: types.PromptVertex for t in prompt_creator.to_list()}, **{t: types.AgentVertex for t in agent_creator.to_list()}, - **{t: types.ChainVertex for t in chain_creator.to_list()}, + # **{t: types.ChainVertex for t in chain_creator.to_list()}, **{t: types.ToolVertex for t in tool_creator.to_list()}, **{t: types.ToolkitVertex for t in toolkits_creator.to_list()}, **{t: types.WrapperVertex for t in wrapper_creator.to_list()}, @@ -51,7 +50,12 @@ class VertexTypesDict(LazyLoadDictBase): **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()}, **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, + **{t: types.ChatVertex for t in CHAT_COMPONENTS}, + **{t: types.RoutingVertex for t in ROUTING_COMPONENTS}, } + def get_custom_component_vertex_type(self): + return types.CustomComponentVertex + lazy_load_vertex_dict = VertexTypesDict() diff --git a/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py b/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py new file mode 100644 index 000000000..875f43472 --- /dev/null +++ b/src/backend/base/langflow/graph/graph/runnable_vertices_manager.py @@ -0,0 +1,111 @@ +import asyncio +from collections import defaultdict +from typing import TYPE_CHECKING, Awaitable, Callable, List + +if TYPE_CHECKING: + from langflow.graph.graph.base import Graph + from langflow.graph.vertex.base import Vertex + + +class RunnableVerticesManager: + def __init__(self): + self.run_map = defaultdict(list) # Tracks successors of each vertex + self.run_predecessors = defaultdict(set) # Tracks predecessors for each vertex + self.vertices_to_run = set() # Set of vertices that are ready to run + + def is_vertex_runnable(self, vertex_id: str) -> bool: + """Determines if a vertex is runnable.""" + return vertex_id in self.vertices_to_run and not 
self.run_predecessors.get(vertex_id) + + def find_runnable_predecessors_for_successors(self, vertex_id: str) -> List[str]: + """Finds runnable predecessors for the successors of a given vertex.""" + runnable_vertices = [] + visited = set() + + for successor_id in self.run_map.get(vertex_id, []): + for predecessor_id in self.run_predecessors.get(successor_id, []): + if predecessor_id not in visited and self.is_vertex_runnable(predecessor_id): + runnable_vertices.append(predecessor_id) + visited.add(predecessor_id) + return runnable_vertices + + def remove_from_predecessors(self, vertex_id: str): + """Removes a vertex from the predecessor list of its successors.""" + predecessors = self.run_map.get(vertex_id, []) + for predecessor in predecessors: + if vertex_id in self.run_predecessors[predecessor]: + self.run_predecessors[predecessor].remove(vertex_id) + + def build_run_map(self, graph): + """Builds a map of vertices and their runnable successors.""" + self.run_map = defaultdict(list) + for vertex_id, predecessors in graph.predecessor_map.items(): + for predecessor in predecessors: + self.run_map[predecessor].append(vertex_id) + self.run_predecessors = graph.predecessor_map.copy() + self.vertices_to_run = graph.vertices_to_run + + def update_vertex_run_state(self, vertex_id: str, is_runnable: bool): + """Updates the runnable state of a vertex.""" + if is_runnable: + self.vertices_to_run.add(vertex_id) + else: + self.vertices_to_run.discard(vertex_id) + + async def get_next_runnable_vertices( + self, + lock: asyncio.Lock, + set_cache_coro: Callable[["Graph", asyncio.Lock], Awaitable[None]], + graph: "Graph", + vertex: "Vertex", + ): + """ + Retrieves the next runnable vertices in the graph for a given vertex. + + Args: + graph (Graph): The graph object representing the flow. + vertex (Vertex): The current vertex. + vertex_id (str): The ID of the current vertex. + chat_service (ChatService): The chat service object. + flow_id (str): The ID of the flow. 
+ + Returns: + list: A list of IDs of the next runnable vertices. + + """ + async with lock: + self.remove_from_predecessors(vertex.id) + direct_successors_ready = [v for v in vertex.successors_ids if self.is_vertex_runnable(v)] + if not direct_successors_ready: + # No direct successors ready, look for runnable predecessors of successors + next_runnable_vertices = self.find_runnable_predecessors_for_successors(vertex.id) + else: + next_runnable_vertices = direct_successors_ready + + for v_id in set(next_runnable_vertices): # Use set to avoid duplicates + self.update_vertex_run_state(v_id, is_runnable=False) + self.remove_from_predecessors(v_id) + await set_cache_coro(data=graph, lock=lock) # type: ignore + return next_runnable_vertices + + @staticmethod + def get_top_level_vertices(graph, vertices_ids): + """ + Retrieves the top-level vertices from the given graph based on the provided vertex IDs. + + Args: + graph (Graph): The graph object containing the vertices. + vertices_ids (list): A list of vertex IDs. + + Returns: + list: A list of top-level vertex IDs. 
+ + """ + top_level_vertices = [] + for vertex_id in vertices_ids: + vertex = graph.get_vertex(vertex_id) + if vertex.parent_is_top_level: + top_level_vertices.append(vertex.parent_node_id) + else: + top_level_vertices.append(vertex_id) + return top_level_vertices diff --git a/src/backend/base/langflow/graph/graph/state_manager.py b/src/backend/base/langflow/graph/graph/state_manager.py new file mode 100644 index 000000000..880adca55 --- /dev/null +++ b/src/backend/base/langflow/graph/graph/state_manager.py @@ -0,0 +1,36 @@ +from typing import TYPE_CHECKING, Callable + +from langflow.services.deps import get_state_service +from loguru import logger + +if TYPE_CHECKING: + from langflow.services.state.service import StateService + + +class GraphStateManager: + def __init__(self): + self.state_service: "StateService" = get_state_service() + + def append_state(self, key, new_state, run_id: str): + self.state_service.append_state(key, new_state, run_id) + + def update_state(self, key, new_state, run_id: str): + self.state_service.update_state(key, new_state, run_id) + + def get_state(self, key, run_id: str): + return self.state_service.get_state(key, run_id) + + def subscribe(self, key, observer: Callable): + self.state_service.subscribe(key, observer) + + def notify_observers(self, key, new_state): + for callback in self.observers[key]: + callback(key, new_state, append=False) + + def notify_append_observers(self, key, new_state): + for callback in self.observers[key]: + try: + callback(key, new_state, append=True) + except Exception as e: + logger.error(f"Error in observer {callback} for key {key}: {e}") + logger.warning("Callbacks not implemented yet") diff --git a/src/backend/langflow/graph/graph/utils.py b/src/backend/base/langflow/graph/graph/utils.py similarity index 82% rename from src/backend/langflow/graph/graph/utils.py rename to src/backend/base/langflow/graph/graph/utils.py index d43fc4d84..a3299739e 100644 --- a/src/backend/langflow/graph/graph/utils.py 
+++ b/src/backend/base/langflow/graph/graph/utils.py @@ -1,6 +1,5 @@ -import copy from collections import deque -from typing import Dict, List +import copy def find_last_node(nodes, edges): @@ -47,38 +46,6 @@ def ungroup_node(group_node_data, base_flow): return nodes -def raw_topological_sort(nodes, edges) -> List[Dict]: - # Redefine the above function but using the nodes and self._edges - # which are dicts instead of Vertex and Edge objects - # nodes have an id, edges have a source and target keys - # return a list of node ids in topological order - - # States: 0 = unvisited, 1 = visiting, 2 = visited - state = {node["id"]: 0 for node in nodes} - nodes_dict = {node["id"]: node for node in nodes} - sorted_vertices = [] - - def dfs(node): - if state[node] == 1: - # We have a cycle - raise ValueError("Graph contains a cycle, cannot perform topological sort") - if state[node] == 0: - state[node] = 1 - for edge in edges: - if edge["source"] == node: - dfs(edge["target"]) - state[node] = 2 - sorted_vertices.append(node) - - # Visit each node - for node in nodes: - if state[node["id"]] == 0: - dfs(node["id"]) - - reverse_sorted = list(reversed(sorted_vertices)) - return [nodes_dict[node_id] for node_id in reverse_sorted] - - def process_flow(flow_object): cloned_flow = copy.deepcopy(flow_object) processed_nodes = set() # To keep track of processed nodes @@ -99,8 +66,7 @@ def process_flow(flow_object): # Mark node as processed processed_nodes.add(node_id) - sorted_nodes_list = raw_topological_sort(cloned_flow["nodes"], cloned_flow["edges"]) - nodes_to_process = deque(sorted_nodes_list) + nodes_to_process = deque(cloned_flow["nodes"]) while nodes_to_process: node = nodes_to_process.popleft() @@ -141,11 +107,7 @@ def update_template(template, g_nodes): g_nodes[node_index]["data"]["node"]["template"][field]["display_name"] = display_name -def update_target_handle( - new_edge, - g_nodes, - group_node_id, -): +def update_target_handle(new_edge, g_nodes, group_node_id): """ 
Updates the target handle of a given edge if it is a proxy node. @@ -162,8 +124,6 @@ def update_target_handle( proxy_id = target_handle["proxy"]["id"] if node := next((n for n in g_nodes if n["id"] == proxy_id), None): set_new_target_handle(proxy_id, new_edge, target_handle, node) - else: - raise ValueError(f"Group node {group_node_id} has an invalid target proxy node {proxy_id}") return new_edge diff --git a/src/backend/base/langflow/graph/schema.py b/src/backend/base/langflow/graph/schema.py new file mode 100644 index 000000000..0bbdfa720 --- /dev/null +++ b/src/backend/base/langflow/graph/schema.py @@ -0,0 +1,55 @@ +from enum import Enum +from typing import Any, List, Optional + +from pydantic import BaseModel, Field, field_serializer + +from langflow.graph.utils import serialize_field +from langflow.utils.schemas import ChatOutputResponse, ContainsEnumMeta + + +class ResultData(BaseModel): + results: Optional[Any] = Field(default_factory=dict) + artifacts: Optional[Any] = Field(default_factory=dict) + messages: Optional[list[ChatOutputResponse]] = Field(default_factory=list) + timedelta: Optional[float] = None + duration: Optional[str] = None + component_display_name: Optional[str] = None + component_id: Optional[str] = None + + @field_serializer("results") + def serialize_results(self, value): + if isinstance(value, dict): + return {key: serialize_field(val) for key, val in value.items()} + return serialize_field(value) + + +class InterfaceComponentTypes(str, Enum, metaclass=ContainsEnumMeta): + # ChatInput and ChatOutput are the only ones that are + # power components + ChatInput = "ChatInput" + ChatOutput = "ChatOutput" + TextInput = "TextInput" + TextOutput = "TextOutput" + + def __contains__(cls, item): + try: + cls(item) + except ValueError: + return False + else: + return True + + +INPUT_COMPONENTS = [ + InterfaceComponentTypes.ChatInput, + InterfaceComponentTypes.TextInput, +] +OUTPUT_COMPONENTS = [ + InterfaceComponentTypes.ChatOutput, + 
InterfaceComponentTypes.TextOutput, +] + + +class RunOutputs(BaseModel): + inputs: dict = Field(default_factory=dict) + outputs: List[Optional[ResultData]] = Field(default_factory=list) diff --git a/src/backend/langflow/graph/utils.py b/src/backend/base/langflow/graph/utils.py similarity index 53% rename from src/backend/langflow/graph/utils.py rename to src/backend/base/langflow/graph/utils.py index e163d76c4..83e2177b1 100644 --- a/src/backend/langflow/graph/utils.py +++ b/src/backend/base/langflow/graph/utils.py @@ -1,5 +1,8 @@ from typing import Any, Union +from langchain_core.documents import Document +from pydantic import BaseModel + from langflow.interface.utils import extract_input_variables_from_prompt @@ -7,6 +10,10 @@ class UnbuiltObject: pass +class UnbuiltResult: + pass + + def validate_prompt(prompt: str): """Validate prompt.""" if extract_input_variables_from_prompt(prompt): @@ -29,3 +36,17 @@ def flatten_list(list_of_lists: list[Union[list, Any]]) -> list: else: new_list.append(item) return new_list + + +def serialize_field(value): + """Unified serialization function for handling both BaseModel and Document types, + including handling lists of these types.""" + if isinstance(value, (list, tuple)): + return [serialize_field(v) for v in value] + elif isinstance(value, Document): + return value.to_json() + elif isinstance(value, BaseModel): + return value.model_dump() + elif isinstance(value, str): + return {"result": value} + return value diff --git a/src/backend/langflow/services/chat/__init__.py b/src/backend/base/langflow/graph/vertex/__init__.py similarity index 100% rename from src/backend/langflow/services/chat/__init__.py rename to src/backend/base/langflow/graph/vertex/__init__.py diff --git a/src/backend/base/langflow/graph/vertex/base.py b/src/backend/base/langflow/graph/vertex/base.py new file mode 100644 index 000000000..5d855fcdb --- /dev/null +++ b/src/backend/base/langflow/graph/vertex/base.py @@ -0,0 +1,749 @@ +import ast +import 
asyncio +import inspect +import types +from enum import Enum +from typing import TYPE_CHECKING, Any, AsyncIterator, Callable, Dict, Iterator, List, Optional + +from loguru import logger + +from langflow.graph.schema import INPUT_COMPONENTS, OUTPUT_COMPONENTS, InterfaceComponentTypes, ResultData +from langflow.graph.utils import UnbuiltObject, UnbuiltResult +from langflow.graph.vertex.utils import generate_result +from langflow.interface.initialize import loading +from langflow.interface.listing import lazy_load_dict +from langflow.schema.schema import INPUT_FIELD_NAME +from langflow.services.deps import get_storage_service +from langflow.utils.constants import DIRECT_TYPES +from langflow.utils.schemas import ChatOutputResponse +from langflow.utils.util import sync_to_async, unescape_string + +if TYPE_CHECKING: + from langflow.graph.edge.base import ContractEdge + from langflow.graph.graph.base import Graph + + +class VertexStates(str, Enum): + """Vertex are related to it being active, inactive, or in an error state.""" + + ACTIVE = "active" + INACTIVE = "inactive" + ERROR = "error" + + +class Vertex: + def __init__( + self, + data: Dict, + graph: "Graph", + base_type: Optional[str] = None, + is_task: bool = False, + params: Optional[Dict] = None, + ) -> None: + # is_external means that the Vertex send or receives data from + # an external source (e.g the chat) + self._lock = asyncio.Lock() + self.will_stream = False + self.updated_raw_params = False + self.id: str = data["id"] + self.base_name = self.id.split("-")[0] + self.is_state = False + self.is_input = any(input_component_name in self.id for input_component_name in INPUT_COMPONENTS) + self.is_output = any(output_component_name in self.id for output_component_name in OUTPUT_COMPONENTS) + self.has_session_id = None + self._custom_component = None + self.has_external_input = False + self.has_external_output = False + self.graph = graph + self._data = data + self.base_type: Optional[str] = base_type + 
self._parse_data() + self._built_object = UnbuiltObject() + self._built_result = None + self._built = False + self.artifacts: Dict[str, Any] = {} + self.steps: List[Callable] = [self._build] + self.steps_ran: List[Callable] = [] + self.task_id: Optional[str] = None + self.is_task = is_task + self.params = params or {} + self.parent_node_id: Optional[str] = self._data.get("parent_node_id") + self.load_from_db_fields: List[str] = [] + self.parent_is_top_level = False + self.layer = None + self.should_run = True + self.result: Optional[ResultData] = None + try: + self.is_interface_component = self.vertex_type in InterfaceComponentTypes + except ValueError: + self.is_interface_component = False + + self.use_result = False + self.build_times: List[float] = [] + self.state = VertexStates.ACTIVE + + def update_graph_state(self, key, new_state, append: bool): + if append: + self.graph.append_state(key, new_state, caller=self.id) + else: + self.graph.update_state(key, new_state, caller=self.id) + + def set_state(self, state: str): + self.state = VertexStates[state] + if self.state == VertexStates.INACTIVE and self.graph.in_degree_map[self.id] < 2: + # If the vertex is inactive and has only one in degree + # it means that it is not a merge point in the graph + self.graph.inactivated_vertices.add(self.id) + elif self.state == VertexStates.ACTIVE and self.id in self.graph.inactivated_vertices: + self.graph.inactivated_vertices.remove(self.id) + + @property + def avg_build_time(self): + return sum(self.build_times) / len(self.build_times) if self.build_times else 0 + + def add_build_time(self, time): + self.build_times.append(time) + + def set_result(self, result: ResultData) -> None: + self.result = result + + def get_built_result(self): + # If the Vertex.type is a power component + # then we need to return the built object + # instead of the result dict + if self.is_interface_component and not isinstance(self._built_object, UnbuiltObject): + result = self._built_object + # if 
it is not a dict or a string and hasattr model_dump then + # return the model_dump + if not isinstance(result, (dict, str)) and hasattr(result, "content"): + return result.content + return result + if isinstance(self._built_object, str): + self._built_result = self._built_object + + if isinstance(self._built_result, UnbuiltResult): + return {} + return self._built_result if isinstance(self._built_result, dict) else {"result": self._built_result} + + def set_artifacts(self) -> None: + pass + + @property + def edges(self) -> List["ContractEdge"]: + return self.graph.get_vertex_edges(self.id) + + @property + def predecessors(self) -> List["Vertex"]: + return self.graph.get_predecessors(self) + + @property + def successors(self) -> List["Vertex"]: + return self.graph.get_successors(self) + + @property + def successors_ids(self) -> List[str]: + return self.graph.successor_map.get(self.id, []) + + def __getstate__(self): + return { + "_data": self._data, + "params": {}, + "base_type": self.base_type, + "base_name": self.base_name, + "is_task": self.is_task, + "id": self.id, + "_built_object": UnbuiltObject(), + "_built": False, + "parent_node_id": self.parent_node_id, + "parent_is_top_level": self.parent_is_top_level, + "load_from_db_fields": self.load_from_db_fields, + "is_input": self.is_input, + "is_output": self.is_output, + } + + def __setstate__(self, state): + self._lock = asyncio.Lock() + self._data = state["_data"] + self.params = state["params"] + self.base_type = state["base_type"] + self.is_task = state["is_task"] + self.id = state["id"] + self.frozen = state.get("frozen", False) + self.is_input = state.get("is_input", False) + self.is_output = state.get("is_output", False) + self.base_name = state["base_name"] + self._parse_data() + if "_built_object" in state: + self._built_object = state["_built_object"] + self._built = state["_built"] + else: + self._built_object = UnbuiltObject() + self._built = False + if "_built_result" in state: + self._built_result = 
state["_built_result"] + else: + self._built_result = UnbuiltResult() + self.artifacts: Dict[str, Any] = {} + self.task_id: Optional[str] = None + self.parent_node_id = state["parent_node_id"] + self.parent_is_top_level = state["parent_is_top_level"] + self.load_from_db_fields = state["load_from_db_fields"] + self.layer = state.get("layer") + self.steps = state.get("steps", [self._build]) + + def set_top_level(self, top_level_vertices: List[str]) -> None: + self.parent_is_top_level = self.parent_node_id in top_level_vertices + + def _parse_data(self) -> None: + self.data = self._data["data"] + self.output = self.data["node"]["base_classes"] + self.display_name = self.data["node"].get("display_name", self.id.split("-")[0]) + + self.description = self.data["node"].get("description", "") + self.frozen = self.data["node"].get("frozen", False) + self.selected_output_type = self.data["node"].get("selected_output_type") + self.is_input = self.data["node"].get("is_input") or self.is_input + self.is_output = self.data["node"].get("is_output") or self.is_output + template_dicts = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)} + + self.has_session_id = "session_id" in template_dicts + + self.required_inputs = [ + template_dicts[key]["type"] for key, value in template_dicts.items() if value["required"] + ] + self.optional_inputs = [ + template_dicts[key]["type"] for key, value in template_dicts.items() if not value["required"] + ] + # Add the template_dicts[key]["input_types"] to the optional_inputs + self.optional_inputs.extend( + [input_type for value in template_dicts.values() for input_type in value.get("input_types", [])] + ) + + template_dict = self.data["node"]["template"] + self.vertex_type = ( + self.data["type"] + if "Tool" not in self.output or template_dict["_type"].islower() + else template_dict["_type"] + ) + + if self.base_type is None: + for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items(): + if 
self.vertex_type in value: + self.base_type = base_type + break + + def get_task(self): + # using the task_id, get the task from celery + # and return it + from celery.result import AsyncResult # type: ignore + + return AsyncResult(self.task_id) + + def _build_params(self): + # sourcery skip: merge-list-append, remove-redundant-if + # Some params are required, some are optional + # but most importantly, some params are python base classes + # like str and others are LangChain objects like LLMChain, BasePromptTemplate + # so we need to be able to distinguish between the two + + # The dicts with "type" == "str" are the ones that are python base classes + # and most likely have a "value" key + + # So for each key besides "_type" in the template dict, we have a dict + # with a "type" key. If the type is not "str", then we need to get the + # edge that connects to that node and get the Node with the required data + # and use that as the value for the param + # If the type is "str", then we need to get the value of the "value" key + # and use that as the value for the param + + if self.graph is None: + raise ValueError("Graph not found") + + if self.updated_raw_params: + self.updated_raw_params = False + return + + template_dict = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)} + params = {} + + for edge in self.edges: + if not hasattr(edge, "target_param"): + continue + param_key = edge.target_param + + # If the param_key is in the template_dict and the edge.target_id is the current node + # We check this to make sure params with the same name but different target_id + # don't get overwritten + if param_key in template_dict and edge.target_id == self.id: + if template_dict[param_key]["list"]: + if param_key not in params: + params[param_key] = [] + params[param_key].append(self.graph.get_vertex(edge.source_id)) + elif edge.target_id == self.id: + if isinstance(template_dict[param_key].get("value"), dict): + # we don't know 
the key of the dict but we need to set the value + # to the vertex that is the source of the edge + param_dict = template_dict[param_key]["value"] + params[param_key] = {key: self.graph.get_vertex(edge.source_id) for key in param_dict.keys()} + else: + params[param_key] = self.graph.get_vertex(edge.source_id) + + load_from_db_fields = [] + for field_name, field in template_dict.items(): + if field_name in params: + continue + # Skip _type and any value that has show == False and is not code + # If we don't want to show code but we want to use it + if field_name == "_type" or (not field.get("show") and field_name != "code"): + continue + # If the type is not transformable to a python base class + # then we need to get the edge that connects to this node + if field.get("type") == "file": + # Load the type in value.get('fileTypes') using + # what is inside value.get('content') + # value.get('value') is the file name + if file_path := field.get("file_path"): + storage_service = get_storage_service() + try: + flow_id, file_name = file_path.split("/") + full_path = storage_service.build_full_path(flow_id, file_name) + except ValueError as e: + if "too many values to unpack" in str(e): + full_path = file_path + else: + raise e + params[field_name] = full_path + elif field.get("required"): + raise ValueError(f"File path not found for {self.display_name}") + elif field.get("type") in DIRECT_TYPES and params.get(field_name) is None: + val = field.get("value") + if field.get("type") == "code": + try: + params[field_name] = ast.literal_eval(val) if val else None + except Exception: + params[field_name] = val + elif field.get("type") in ["dict", "NestedDict"]: + # When dict comes from the frontend it comes as a + # list of dicts, so we need to convert it to a dict + # before passing it to the build method + if isinstance(val, list): + params[field_name] = {k: v for item in field.get("value", []) for k, v in item.items()} + elif isinstance(val, dict): + params[field_name] = val 
+ elif field.get("type") == "int" and val is not None: + try: + params[field_name] = int(val) + except ValueError: + params[field_name] = val + elif field.get("type") == "float" and val is not None: + try: + params[field_name] = float(val) + except ValueError: + params[field_name] = val + params[field_name] = val + elif field.get("type") == "str" and val is not None: + # val may contain escaped \n, \t, etc. + # so we need to unescape it + if isinstance(val, list): + params[field_name] = [unescape_string(v) for v in val] + elif isinstance(val, str): + params[field_name] = unescape_string(val) + elif val is not None and val != "": + params[field_name] = val + + elif val is not None and val != "": + params[field_name] = val + if field.get("load_from_db"): + load_from_db_fields.append(field_name) + + if not field.get("required") and params.get(field_name) is None: + if field.get("default"): + params[field_name] = field.get("default") + else: + params.pop(field_name, None) + # Add _type to params + self.params = params + self.load_from_db_fields = load_from_db_fields + self._raw_params = params.copy() + + def update_raw_params(self, new_params: Dict[str, str], overwrite: bool = False): + """ + Update the raw parameters of the vertex with the given new parameters. + + Args: + new_params (Dict[str, Any]): The new parameters to update. + + Raises: + ValueError: If any key in new_params is not found in self._raw_params. + """ + # First check if the input_value in _raw_params is not a vertex + if not new_params: + return + if any(isinstance(self._raw_params.get(key), Vertex) for key in new_params): + return + if not overwrite: + for key in new_params.copy(): + if key not in self._raw_params: + new_params.pop(key) + self._raw_params.update(new_params) + self.params = self._raw_params.copy() + self.updated_raw_params = True + + async def _build(self, user_id=None): + """ + Initiate the build process. 
+ """ + logger.debug(f"Building {self.display_name}") + await self._build_each_vertex_in_params_dict(user_id) + await self._get_and_instantiate_class(user_id) + self._validate_built_object() + + self._built = True + + def extract_messages_from_artifacts(self, artifacts: Dict[str, Any]) -> List[dict]: + """ + Extracts messages from the artifacts. + + Args: + artifacts (Dict[str, Any]): The artifacts to extract messages from. + + Returns: + List[str]: The extracted messages. + """ + try: + messages = [ + ChatOutputResponse( + message=artifacts["message"], + sender=artifacts.get("sender"), + sender_name=artifacts.get("sender_name"), + session_id=artifacts.get("session_id"), + component_id=self.id, + ).model_dump(exclude_none=True) + ] + except KeyError: + messages = [] + + return messages + + def _finalize_build(self): + result_dict = self.get_built_result() + # We need to set the artifacts to pass information + # to the frontend + self.set_artifacts() + artifacts = self.artifacts + messages = self.extract_messages_from_artifacts(artifacts) + result_dict = ResultData( + results=result_dict, + artifacts=artifacts, + messages=messages, + component_display_name=self.display_name, + component_id=self.id, + ) + self.set_result(result_dict) + + async def _run( + self, + user_id: str, + inputs: Optional[dict] = None, + session_id: Optional[str] = None, + ): + # user_id is just for compatibility with the other build methods + inputs = inputs or {} + # inputs = {key: value or "" for key, value in inputs.items()} + # if hasattr(self._built_object, "input_keys"): + # # test if all keys are in inputs + # # and if not add them with empty string + # # for key in self._built_object.input_keys: + # # if key not in inputs: + # # inputs[key] = "" + # if inputs == {} and hasattr(self._built_object, "prompt"): + # inputs = self._built_object.prompt.partial_variables + if isinstance(self._built_object, str): + self._built_result = self._built_object + + result = await 
generate_result(self._built_object, inputs, self.has_external_output, session_id) + self._built_result = result + + async def _build_each_vertex_in_params_dict(self, user_id=None): + """ + Iterates over each vertex in the params dictionary and builds it. + """ + for key, value in self._raw_params.items(): + if self._is_vertex(value): + if value == self: + del self.params[key] + continue + await self._build_vertex_and_update_params( + key, + value, + ) + elif isinstance(value, list) and self._is_list_of_vertices(value): + await self._build_list_of_vertices_and_update_params(key, value) + elif isinstance(value, dict): + await self._build_dict_and_update_params( + key, + value, + ) + elif key not in self.params or self.updated_raw_params: + self.params[key] = value + + async def _build_dict_and_update_params( + self, + key, + vertices_dict: Dict[str, "Vertex"], + ): + """ + Iterates over a dictionary of vertices, builds each and updates the params dictionary. + """ + for sub_key, value in vertices_dict.items(): + if not self._is_vertex(value): + self.params[key][sub_key] = value + else: + result = await value.get_result() + self.params[key][sub_key] = result + + def _is_vertex(self, value): + """ + Checks if the provided value is an instance of Vertex. + """ + return isinstance(value, Vertex) + + def _is_list_of_vertices(self, value): + """ + Checks if the provided value is a list of Vertex instances. + """ + return all(self._is_vertex(vertex) for vertex in value) + + async def get_result( + self, + ) -> Any: + """ + Retrieves the result of the vertex. + + This is a read-only method so it raises an error if the vertex has not been built yet. + + Returns: + The result of the vertex. + """ + async with self._lock: + return await self._get_result() + + async def _get_result(self) -> Any: + """ + Retrieves the result of the built component. + + If the component has not been built yet, a ValueError is raised. 
+ + Returns: + The built result if use_result is True, else the built object. + """ + if not self._built: + raise ValueError(f"Component {self.display_name} has not been built yet") + return self._built_result if self.use_result else self._built_object + + async def _build_vertex_and_update_params(self, key, vertex: "Vertex"): + """ + Builds a given vertex and updates the params dictionary accordingly. + """ + + result = await vertex.get_result() + self._handle_func(key, result) + if isinstance(result, list): + self._extend_params_list_with_result(key, result) + self.params[key] = result + + async def _build_list_of_vertices_and_update_params( + self, + key, + vertices: List["Vertex"], + ): + """ + Iterates over a list of vertices, builds each and updates the params dictionary. + """ + self.params[key] = [] + for vertex in vertices: + result = await vertex.get_result() + # Weird check to see if the params[key] is a list + # because sometimes it is a Record and breaks the code + if not isinstance(self.params[key], list): + self.params[key] = [self.params[key]] + + if isinstance(result, list): + self.params[key].extend(result) + else: + try: + if self.params[key] == result: + continue + + self.params[key].append(result) + except AttributeError as e: + logger.exception(e) + raise ValueError( + f"Params {key} ({self.params[key]}) is not a list and cannot be extended with {result}" + f"Error building vertex {self.display_name}: {str(e)}" + ) from e + + def _handle_func(self, key, result): + """ + Handles 'func' key by checking if the result is a function and setting it as coroutine. 
+ """ + if key == "func": + if not isinstance(result, types.FunctionType): + if hasattr(result, "run"): + result = result.run # type: ignore + elif hasattr(result, "get_function"): + result = result.get_function() # type: ignore + elif inspect.iscoroutinefunction(result): + self.params["coroutine"] = result + else: + self.params["coroutine"] = sync_to_async(result) + + def _extend_params_list_with_result(self, key, result): + """ + Extends a list in the params dictionary with the given result if it exists. + """ + if isinstance(self.params[key], list): + self.params[key].extend(result) + + async def _get_and_instantiate_class(self, user_id=None): + """ + Gets the class from a dictionary and instantiates it with the params. + """ + if self.base_type is None: + raise ValueError(f"Base type for vertex {self.display_name} not found") + try: + result = await loading.instantiate_class( + user_id=user_id, + vertex=self, + ) + self._update_built_object_and_artifacts(result) + except Exception as exc: + logger.exception(exc) + + raise ValueError(f"Error building vertex {self.display_name}: {str(exc)}") from exc + + def _update_built_object_and_artifacts(self, result): + """ + Updates the built object and its artifacts. + """ + if isinstance(result, tuple): + if len(result) == 2: + self._built_object, self.artifacts = result + elif len(result) == 3: + self._custom_component, self._built_object, self.artifacts = result + else: + self._built_object = result + + def _validate_built_object(self): + """ + Checks if the built object is None and raises a ValueError if so. + """ + if isinstance(self._built_object, UnbuiltObject): + raise ValueError(f"{self.display_name}: {self._built_object_repr()}") + elif self._built_object is None: + message = f"{self.display_name} returned None." + if self.base_type == "custom_components": + message += " Make sure your build method returns a component." 
+ + logger.warning(message) + elif isinstance(self._built_object, (Iterator, AsyncIterator)): + if self.display_name in ["Text Output"]: + raise ValueError(f"You are trying to stream to a {self.display_name}. Try using a Chat Output instead.") + + def _reset(self, params_update: Optional[Dict[str, Any]] = None): + self._built = False + self._built_object = UnbuiltObject() + self._built_result = UnbuiltResult() + self.artifacts = {} + self.steps_ran = [] + self._build_params() + + def _is_chat_input(self): + return False + + def build_inactive(self): + # Just set the results to None + self._built = True + self._built_object = None + self._built_result = None + + async def build( + self, + user_id=None, + inputs: Optional[Dict[str, Any]] = None, + requester: Optional["Vertex"] = None, + **kwargs, + ) -> Any: + async with self._lock: + if self.state == VertexStates.INACTIVE: + # If the vertex is inactive, return None + self.build_inactive() + return + + if self.frozen and self._built: + return self.get_requester_result(requester) + elif self._built and requester is not None: + # This means that the vertex has already been built + # and we are just getting the result for the requester + return await self.get_requester_result(requester) + self._reset() + + if self._is_chat_input() and inputs: + inputs = {"input_value": inputs.get(INPUT_FIELD_NAME, "")} + self.update_raw_params(inputs, overwrite=True) + + # Run steps + for step in self.steps: + if step not in self.steps_ran: + if inspect.iscoroutinefunction(step): + await step(user_id=user_id, **kwargs) + else: + step(user_id=user_id, **kwargs) + self.steps_ran.append(step) + + self._finalize_build() + + return await self.get_requester_result(requester) + + async def get_requester_result(self, requester: Optional["Vertex"]): + # If the requester is None, this means that + # the Vertex is the root of the graph + if requester is None: + return self._built_object + + # Get the requester edge + requester_edge = next((edge 
for edge in self.edges if edge.target_id == requester.id), None) + # Return the result of the requester edge + return ( + None + if requester_edge is None + else await requester_edge.get_result_from_source(source=self, target=requester) + ) + + def add_edge(self, edge: "ContractEdge") -> None: + if edge not in self.edges: + self.edges.append(edge) + + def __repr__(self) -> str: + return f"Vertex(display_name={self.display_name}, id={self.id}, data={self.data})" + + def __eq__(self, __o: object) -> bool: + try: + if not isinstance(__o, Vertex): + return False + # We should create a more robust comparison + # for the Vertex class + ids_are_equal = self.id == __o.id + # self._data is a dict and we need to compare them + # to check if they are equal + data_are_equal = self.data == __o.data + return ids_are_equal and data_are_equal + except AttributeError: + return False + + def __hash__(self) -> int: + return id(self) + + def _built_object_repr(self): + # Add a message with an emoji, stars for sucess, + return "Built sucessfully ✨" if self._built_object is not None else "Failed to build 😵‍💫" diff --git a/src/backend/langflow/graph/vertex/constants.py b/src/backend/base/langflow/graph/vertex/constants.py similarity index 100% rename from src/backend/langflow/graph/vertex/constants.py rename to src/backend/base/langflow/graph/vertex/constants.py diff --git a/src/backend/base/langflow/graph/vertex/types.py b/src/backend/base/langflow/graph/vertex/types.py new file mode 100644 index 000000000..d7a98df8a --- /dev/null +++ b/src/backend/base/langflow/graph/vertex/types.py @@ -0,0 +1,491 @@ +import ast +import json +from typing import AsyncIterator, Callable, Dict, Iterator, List, Optional, Union +import yaml +from langchain_core.messages import AIMessage +from loguru import logger + +from langflow.graph.schema import InterfaceComponentTypes +from langflow.graph.utils import UnbuiltObject, flatten_list, serialize_field +from langflow.graph.vertex.base import Vertex +from 
langflow.interface.utils import extract_input_variables_from_prompt +from langflow.schema import Record +from langflow.schema.schema import INPUT_FIELD_NAME +from langflow.services.monitor.utils import log_vertex_build +from langflow.utils.schemas import ChatOutputResponse +from langflow.utils.util import unescape_string + + +class AgentVertex(Vertex): + def __init__(self, data: Dict, graph, params: Optional[Dict] = None): + super().__init__(data, graph=graph, base_type="agents", params=params) + + self.tools: List[Union[ToolkitVertex, ToolVertex]] = [] + self.chains: List[ChainVertex] = [] + self.steps: List[Callable] = [self._custom_build] + + def __getstate__(self): + state = super().__getstate__() + state["tools"] = self.tools + state["chains"] = self.chains + return state + + def __setstate__(self, state): + self.tools = state["tools"] + self.chains = state["chains"] + super().__setstate__(state) + + def _set_tools_and_chains(self) -> None: + for edge in self.edges: + if not hasattr(edge, "source"): + continue + source_node = edge.source + if isinstance(source_node, (ToolVertex, ToolkitVertex)): + self.tools.append(source_node) + elif isinstance(source_node, ChainVertex): + self.chains.append(source_node) + + async def _custom_build(self, *args, **kwargs): + user_id = kwargs.get("user_id", None) + self._set_tools_and_chains() + # First, build the tools + for tool_node in self.tools: + await tool_node.build(user_id=user_id) + + # Next, build the chains and the rest + for chain_node in self.chains: + await chain_node.build(tools=self.tools, user_id=user_id) + + await self._build(user_id=user_id) + + +class ToolVertex(Vertex): + def __init__(self, data: Dict, graph, params: Optional[Dict] = None): + super().__init__(data, graph=graph, base_type="tools", params=params) + + +class LLMVertex(Vertex): + built_node_type = None + class_built_object = None + + def __init__(self, data: Dict, graph, params: Optional[Dict] = None): + super().__init__(data, graph=graph, 
base_type="models", params=params) + self.steps: List[Callable] = [self._custom_build] + + async def _custom_build(self, *args, **kwargs): + # LLM is different because some models might take up too much memory + # or time to load. So we only load them when we need them. + # Avoid deepcopying the LLM + # that are loaded from a file + force = kwargs.get("force", False) + user_id = kwargs.get("user_id", None) + if self.vertex_type == self.built_node_type: + self._built_object = self.class_built_object + if not self._built or force: + await self._build(user_id=user_id) + self.built_node_type = self.vertex_type + self.class_built_object = self._built_object + + +class ToolkitVertex(Vertex): + def __init__(self, data: Dict, graph, params=None): + super().__init__(data, graph=graph, base_type="toolkits", params=params) + + +class FileToolVertex(ToolVertex): + def __init__(self, data: Dict, graph, params=None): + super().__init__( + data, + params=params, + graph=graph, + ) + + +class WrapperVertex(Vertex): + def __init__(self, data: Dict, graph, params=None): + super().__init__(data, graph=graph, base_type="wrappers") + self.steps: List[Callable] = [self._custom_build] + + async def _custom_build(self, *args, **kwargs): + force = kwargs.get("force", False) + user_id = kwargs.get("user_id", None) + if not self._built or force: + if "headers" in self.params: + self.params["headers"] = ast.literal_eval(self.params["headers"]) + await self._build(user_id=user_id) + + +class DocumentLoaderVertex(Vertex): + def __init__(self, data: Dict, graph, params: Optional[Dict] = None): + super().__init__(data, graph=graph, base_type="documentloaders", params=params) + + def _built_object_repr(self): + # This built_object is a list of documents. Maybe we should + # show how many documents are in the list? 
+ + if not isinstance(self._built_object, UnbuiltObject): + avg_length = sum(len(record.get_text()) for record in self._built_object if hasattr(record, "text")) / len( + self._built_object + ) + return f"""{self.display_name}({len(self._built_object)} records) + \nAvg. Record Length (characters): {int(avg_length)} + Records: {self._built_object[:3]}...""" + return f"{self.vertex_type}()" + + +class EmbeddingVertex(Vertex): + def __init__(self, data: Dict, graph, params: Optional[Dict] = None): + super().__init__(data, graph=graph, base_type="embeddings", params=params) + + +class VectorStoreVertex(Vertex): + def __init__(self, data: Dict, graph, params=None): + super().__init__(data, graph=graph, base_type="vectorstores") + + self.params = params or {} + + # VectorStores may contain databse connections + # so we need to define the __reduce__ method and the __setstate__ method + # to avoid pickling errors + def clean_edges_for_pickling(self): + # for each edge that has self as source + # we need to clear the _built_object of the target + # so that we don't try to pickle a database connection + for edge in self.edges: + if edge.source == self: + edge.target._built_object = None + edge.target._built = False + edge.target.params[edge.target_param] = self + + def remove_docs_and_texts_from_params(self): + # remove documents and texts from params + # so that we don't try to pickle a database connection + self.params.pop("documents", None) + self.params.pop("texts", None) + + def __getstate__(self): + # We want to save the params attribute + # and if "documents" or "texts" are in the params + # we want to remove them because they have already + # been processed. 
+ params = self.params.copy() + params.pop("documents", None) + params.pop("texts", None) + self.clean_edges_for_pickling() + + return super().__getstate__() + + def __setstate__(self, state): + super().__setstate__(state) + self.remove_docs_and_texts_from_params() + + +class MemoryVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="memory") + + +class RetrieverVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="retrievers") + + +class TextSplitterVertex(Vertex): + def __init__(self, data: Dict, graph, params: Optional[Dict] = None): + super().__init__(data, graph=graph, base_type="textsplitters", params=params) + + def _built_object_repr(self): + # This built_object is a list of documents. Maybe we should + # show how many documents are in the list? + + if not isinstance(self._built_object, UnbuiltObject): + avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(self._built_object) + return f"""{self.vertex_type}({len(self._built_object)} documents) + \nAvg. 
Document Length (characters): {int(avg_length)} + \nDocuments: {self._built_object[:3]}...""" + return f"{self.vertex_type}()" + + +class ChainVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="chains") + self.steps = [self._custom_build] + + async def _custom_build(self, *args, **kwargs): + force = kwargs.get("force", False) + user_id = kwargs.get("user_id", None) + # Remove this once LLMChain is CustomComponent + self.params.pop("code", None) + for key, value in self.params.items(): + if isinstance(value, PromptVertex): + # Build the PromptVertex, passing the tools if available + tools = kwargs.get("tools", None) + self.params[key] = value.build(tools=tools, frozen=force) + + await self._build(user_id=user_id) + + def set_artifacts(self) -> None: + if isinstance(self._built_object, UnbuiltObject): + return + if self._built_object and hasattr(self._built_object, "input_keys"): + self.artifacts = dict(input_keys=self._built_object.input_keys) + + def _built_object_repr(self): + if isinstance(self._built_object, str): + return self._built_object + return super()._built_object_repr() + + +class PromptVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="prompts") + self.steps: List[Callable] = [self._custom_build] + + async def _custom_build(self, *args, **kwargs): + force = kwargs.get("force", False) + user_id = kwargs.get("user_id", None) + tools = kwargs.get("tools", []) + if not self._built or force: + if "input_variables" not in self.params or self.params["input_variables"] is None: + self.params["input_variables"] = [] + # Check if it is a ZeroShotPrompt and needs a tool + if "ShotPrompt" in self.vertex_type: + tools = [tool_node.build(user_id=user_id) for tool_node in tools] if tools is not None else [] + # flatten the list of tools if it is a list of lists + # first check if it is a list + if tools and isinstance(tools, list) and isinstance(tools[0], 
list): + tools = flatten_list(tools) + self.params["tools"] = tools + prompt_params = [ + key for key, value in self.params.items() if isinstance(value, str) and key != "format_instructions" + ] + else: + prompt_params = ["template"] + + if "prompt" not in self.params and "messages" not in self.params: + for param in prompt_params: + prompt_text = self.params[param] + variables = extract_input_variables_from_prompt(prompt_text) + self.params["input_variables"].extend(variables) + self.params["input_variables"] = list(set(self.params["input_variables"])) + elif isinstance(self.params, dict): + self.params.pop("input_variables", None) + + await self._build(user_id=user_id) + + def _built_object_repr(self): + if not self.artifacts or self._built_object is None or not hasattr(self._built_object, "format"): + return super()._built_object_repr() + elif isinstance(self._built_object, UnbuiltObject): + return super()._built_object_repr() + # We'll build the prompt with the artifacts + # to show the user what the prompt looks like + # with the variables filled in + artifacts = self.artifacts.copy() + # Remove the handle_keys from the artifacts + # so the prompt format doesn't break + artifacts.pop("handle_keys", None) + try: + if not hasattr(self._built_object, "template") and hasattr(self._built_object, "prompt"): + template = self._built_object.prompt.template + else: + template = self._built_object.template + for key, value in artifacts.items(): + if value: + replace_key = "{" + key + "}" + template = template.replace(replace_key, value) + return template if isinstance(template, str) else f"{self.vertex_type}({template})" + except KeyError: + return str(self._built_object) + + +class OutputParserVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="output_parsers") + + +class CustomComponentVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="custom_components") + + 
def _built_object_repr(self): + if self.artifacts and "repr" in self.artifacts: + return self.artifacts["repr"] or super()._built_object_repr() + + +class ChatVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="custom_components", is_task=True) + self.steps = [self._build, self._run] + + def build_stream_url(self): + return f"/api/v1/build/{self.graph.flow_id}/{self.id}/stream" + + def _built_object_repr(self): + if self.task_id and self.is_task: + if task := self.get_task(): + return str(task.info) + else: + return f"Task {self.task_id} is not running" + if self.artifacts: + # dump as a yaml string + artifacts = {k.title().replace("_", " "): v for k, v in self.artifacts.items() if v is not None} + yaml_str = yaml.dump(artifacts, default_flow_style=False, allow_unicode=True) + return yaml_str + return super()._built_object_repr() + + async def _run(self, *args, **kwargs): + if self.is_interface_component: + if self.vertex_type in ["ChatOutput", "ChatInput"]: + artifacts = None + sender = self.params.get("sender", None) + sender_name = self.params.get("sender_name", None) + message = self.params.get(INPUT_FIELD_NAME, None) + if isinstance(message, str): + message = unescape_string(message) + stream_url = None + if isinstance(self._built_object, AIMessage): + artifacts = ChatOutputResponse.from_message( + self._built_object, + sender=sender, + sender_name=sender_name, + ) + elif not isinstance(self._built_object, UnbuiltObject): + if isinstance(self._built_object, dict): + # Turn the dict into a pleasing to + # read JSON inside a code block + message = dict_to_codeblock(self._built_object) + elif isinstance(self._built_object, Record): + message = self._built_object.text + elif isinstance(message, (AsyncIterator, Iterator)): + stream_url = self.build_stream_url() + message = "" + elif not isinstance(self._built_object, str): + message = str(self._built_object) + # if the message is a generator or iterator + # it 
means that it is a stream of messages + else: + message = self._built_object + + artifacts = ChatOutputResponse( + message=message, + sender=sender, + sender_name=sender_name, + stream_url=stream_url, + ) + + self.will_stream = stream_url is not None + if artifacts: + self.artifacts = artifacts.model_dump(exclude_none=True) + if isinstance(self._built_object, (AsyncIterator, Iterator)): + if self.params["return_record"]: + self._built_object = Record(text=message, data=self.artifacts) + else: + self._built_object = message + self._built_result = self._built_object + + else: + await super()._run(*args, **kwargs) + + async def stream(self): + iterator = self.params.get(INPUT_FIELD_NAME, None) + if not isinstance(iterator, (AsyncIterator, Iterator)): + raise ValueError("The message must be an iterator or an async iterator.") + is_async = isinstance(iterator, AsyncIterator) + complete_message = "" + if is_async: + async for message in iterator: + message = message.content if hasattr(message, "content") else message + message = message.text if hasattr(message, "text") else message + yield message + complete_message += message + else: + for message in iterator: + message = message.content if hasattr(message, "content") else message + message = message.text if hasattr(message, "text") else message + yield message + complete_message += message + self.artifacts = ChatOutputResponse( + message=complete_message, + sender=self.params.get("sender", ""), + sender_name=self.params.get("sender_name", ""), + ).model_dump() + self.params[INPUT_FIELD_NAME] = complete_message + self._built_object = Record(text=complete_message, data=self.artifacts) + self._built_result = complete_message + # Update artifacts with the message + # and remove the stream_url + self._finalize_build() + logger.debug(f"Streamed message: {complete_message}") + + await log_vertex_build( + flow_id=self.graph.flow_id, + vertex_id=self.id, + valid=True, + params=self._built_object_repr(), + data=self.result, + 
artifacts=self.artifacts, + ) + + self._validate_built_object() + self._built = True + + async def consume_async_generator(self): + async for _ in self.stream(): + pass + + def _is_chat_input(self): + return self.vertex_type == InterfaceComponentTypes.ChatInput and self.is_input + + +class RoutingVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="custom_components") + self.use_result = True + self.steps = [self._build] + + def _built_object_repr(self): + if self.artifacts and "repr" in self.artifacts: + return self.artifacts["repr"] or super()._built_object_repr() + return super()._built_object_repr() + + @property + def successors_ids(self): + if isinstance(self._built_object, bool): + ids = super().successors_ids + if self._built_object: + return ids + return [] + raise ValueError("RoutingVertex should return a boolean value.") + + def _run(self, *args, **kwargs): + if self._built_object: + condition = self._built_object.get("condition") + result = self._built_object.get("result") + if condition is None: + raise ValueError("Condition is required for the routing vertex.") + if result is None: + raise ValueError("Result is required for the routing vertex.") + if condition is True: + self._built_result = result + else: + self.graph.mark_branch(self.id, "INACTIVE") + self._built_result = None + + +class StateVertex(Vertex): + def __init__(self, data: Dict, graph): + super().__init__(data, graph=graph, base_type="custom_components") + self.steps = [self._build] + self.is_state = True + + @property + def successors_ids(self) -> List[str]: + successors = self.graph.successor_map.get(self.id, []) + return successors + self.graph.activated_vertices + + +def dict_to_codeblock(d: dict) -> str: + serialized = {key: serialize_field(val) for key, val in d.items()} + json_str = json.dumps(serialized, indent=4) + return f"```json\n{json_str}\n```" diff --git a/src/backend/base/langflow/graph/vertex/utils.py 
b/src/backend/base/langflow/graph/vertex/utils.py new file mode 100644 index 000000000..ea2693e29 --- /dev/null +++ b/src/backend/base/langflow/graph/vertex/utils.py @@ -0,0 +1,65 @@ +from typing import Any, Optional, Union + +from langchain_core.messages import BaseMessage +from langchain_core.runnables import Runnable +from loguru import logger + +from langflow.utils.constants import PYTHON_BASIC_TYPES + + +def is_basic_type(obj): + return type(obj) in PYTHON_BASIC_TYPES + + +async def invoke_lc_runnable( + built_object: Runnable, inputs: dict, has_external_output: bool, session_id: Optional[str] = None, **kwargs +) -> Union[str, BaseMessage]: + # Setup callbacks for asynchronous execution + from langflow.processing.base import setup_callbacks + + callbacks = setup_callbacks(sync=False, trace_id=session_id, **kwargs) + + try: + if has_external_output and hasattr(built_object, "astream"): + # Asynchronous stream handling if supported and required + output = "" + async for chunk in built_object.astream(inputs, {"callbacks": callbacks}): + output += chunk + return output + else: + # Direct asynchronous invocation + return await built_object.ainvoke(inputs, {"callbacks": callbacks}) + except Exception as async_exc: + logger.debug(f"Async error, falling back to sync: {str(async_exc)}") + + # Setup synchronous callbacks for the fallback + sync_callbacks = setup_callbacks(sync=True, trace_id=session_id, **kwargs) + try: + # Synchronous fallback if asynchronous execution fails + if has_external_output and hasattr(built_object, "stream"): + # Synchronous stream handling if supported and required + output = "" + for chunk in built_object.stream(inputs, {"callbacks": sync_callbacks}): + output += chunk + return output + else: + # Direct synchronous invocation + return built_object.invoke(inputs, {"callbacks": sync_callbacks}) + except Exception as sync_exc: + logger.error(f"Sync error after async failure: {str(sync_exc)}") + # Handle or re-raise exception as appropriate for 
your application + raise sync_exc from async_exc + + +async def generate_result(built_object: Any, inputs: dict, has_external_output: bool, session_id: Optional[str] = None): + # If the built_object is instance of Runnable + # we can call `invoke` or `stream` on it + # if it has_external_outputl, we need to call `stream` if it has it + # if not, we call `invoke` if it has it + if isinstance(built_object, Runnable): + result = await invoke_lc_runnable( + built_object=built_object, inputs=inputs, has_external_output=has_external_output, session_id=session_id + ) + else: + result = built_object + return result diff --git a/src/backend/base/langflow/helpers/__init__.py b/src/backend/base/langflow/helpers/__init__.py new file mode 100644 index 000000000..adfa72088 --- /dev/null +++ b/src/backend/base/langflow/helpers/__init__.py @@ -0,0 +1,3 @@ +from .record import docs_to_records, records_to_text + +__all__ = ["docs_to_records", "records_to_text"] diff --git a/src/backend/base/langflow/helpers/flow.py b/src/backend/base/langflow/helpers/flow.py new file mode 100644 index 000000000..654122dcd --- /dev/null +++ b/src/backend/base/langflow/helpers/flow.py @@ -0,0 +1,201 @@ +from typing import TYPE_CHECKING, Any, Callable, Coroutine, List, Optional, Tuple, Type, Union, cast + +from pydantic.v1 import BaseModel, Field, create_model +from sqlmodel import select + +from langflow.schema.schema import INPUT_FIELD_NAME, Record +from langflow.services.database.models.flow.model import Flow +from langflow.services.deps import session_scope + +if TYPE_CHECKING: + from langflow.graph.graph.base import Graph + from langflow.graph.vertex.base import Vertex + +INPUT_TYPE_MAP = { + "ChatInput": {"type_hint": "Optional[str]", "default": '""'}, + "TextInput": {"type_hint": "Optional[str]", "default": '""'}, + "JSONInput": {"type_hint": "Optional[dict]", "default": "{}"}, +} + + +def list_flows(*, user_id: Optional[str] = None) -> List[Record]: + if not user_id: + raise ValueError("Session 
is invalid") + try: + with session_scope() as session: + flows = session.exec( + select(Flow).where(Flow.user_id == user_id).where(Flow.is_component == False) # noqa + ).all() + + flows_records = [flow.to_record() for flow in flows] + return flows_records + except Exception as e: + raise ValueError(f"Error listing flows: {e}") + + +async def load_flow( + user_id: str, flow_id: Optional[str] = None, flow_name: Optional[str] = None, tweaks: Optional[dict] = None +) -> "Graph": + from langflow.graph.graph.base import Graph + from langflow.processing.process import process_tweaks + + if not flow_id and not flow_name: + raise ValueError("Flow ID or Flow Name is required") + if not flow_id and flow_name: + flow_id = find_flow(flow_name, user_id) + if not flow_id: + raise ValueError(f"Flow {flow_name} not found") + + with session_scope() as session: + graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None + if not graph_data: + raise ValueError(f"Flow {flow_id} not found") + if tweaks: + graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) + graph = Graph.from_payload(graph_data, flow_id=flow_id) + return graph + + +def find_flow(flow_name: str, user_id: str) -> Optional[str]: + with session_scope() as session: + flow = session.exec(select(Flow).where(Flow.name == flow_name).where(Flow.user_id == user_id)).first() + return flow.id if flow else None + + +async def run_flow( + inputs: Optional[Union[dict, List[dict]]] = None, + tweaks: Optional[dict] = None, + flow_id: Optional[str] = None, + flow_name: Optional[str] = None, + user_id: Optional[str] = None, +) -> Any: + if user_id is None: + raise ValueError("Session is invalid") + graph = await load_flow(user_id, flow_id, flow_name, tweaks) + + if inputs is None: + inputs = [] + inputs_list = [] + inputs_components = [] + types = [] + for input_dict in inputs: + inputs_list.append({INPUT_FIELD_NAME: cast(str, input_dict.get("input_value"))}) + 
inputs_components.append(input_dict.get("components", [])) + types.append(input_dict.get("type", [])) + + return await graph.arun(inputs_list, inputs_components=inputs_components, types=types) + + +def generate_function_for_flow(inputs: List["Vertex"], flow_id: str) -> Coroutine: + """ + Generate a dynamic flow function based on the given inputs and flow ID. + + Args: + inputs (List[Vertex]): The list of input vertices for the flow. + flow_id (str): The ID of the flow. + + Returns: + Coroutine: The dynamic flow function. + + Raises: + None + + Example: + inputs = [vertex1, vertex2] + flow_id = "my_flow" + function = generate_function_for_flow(inputs, flow_id) + result = function(input1, input2) + """ + # Prepare function arguments with type hints and default values + args = [ + f"{input_.display_name.lower().replace(' ', '_')}: {INPUT_TYPE_MAP[input_.base_name]['type_hint']} = {INPUT_TYPE_MAP[input_.base_name]['default']}" + for input_ in inputs + ] + + # Maintain original argument names for constructing the tweaks dictionary + original_arg_names = [input_.display_name for input_ in inputs] + + # Prepare a Pythonic, valid function argument string + func_args = ", ".join(args) + + # Map original argument names to their corresponding Pythonic variable names in the function + arg_mappings = ", ".join( + f'"{original_name}": {name}' + for original_name, name in zip(original_arg_names, [arg.split(":")[0] for arg in args]) + ) + + func_body = f""" +from typing import Optional +async def flow_function({func_args}): + tweaks = {{ {arg_mappings} }} + from langflow.helpers.flow import run_flow + from langchain_core.tools import ToolException + try: + return await run_flow( + tweaks={{key: {{'input_value': value}} for key, value in tweaks.items()}}, + flow_id="{flow_id}", + ) + except Exception as e: + raise ToolException(f'Error running flow: ' + e) +""" + + compiled_func = compile(func_body, "", "exec") + local_scope: dict = {} + exec(compiled_func, globals(), local_scope) 
+ return local_scope["flow_function"] + + +def build_function_and_schema(flow_record: Record, graph: "Graph") -> Tuple[Callable, Type[BaseModel]]: + """ + Builds a dynamic function and schema for a given flow. + + Args: + flow_record (Record): The flow record containing information about the flow. + graph (Graph): The graph representing the flow. + + Returns: + Tuple[Callable, BaseModel]: A tuple containing the dynamic function and the schema. + """ + flow_id = flow_record.id + inputs = get_flow_inputs(graph) + dynamic_flow_function = generate_function_for_flow(inputs, flow_id) + schema = build_schema_from_inputs(flow_record.name, inputs) + return dynamic_flow_function, schema + + +def get_flow_inputs(graph: "Graph") -> List["Vertex"]: + """ + Retrieves the flow inputs from the given graph. + + Args: + graph (Graph): The graph object representing the flow. + + Returns: + List[Record]: A list of input records, where each record contains the ID, name, and description of the input vertex. + """ + inputs = [] + for vertex in graph.vertices: + if vertex.is_input: + inputs.append(vertex) + return inputs + + +def build_schema_from_inputs(name: str, inputs: List["Vertex"]) -> Type[BaseModel]: + """ + Builds a schema from the given inputs. + + Args: + name (str): The name of the schema. + inputs (List[tuple[str, str, str]]): A list of tuples representing the inputs. + Each tuple contains three elements: the input name, the input type, and the input description. + + Returns: + BaseModel: The schema model. 
+ + """ + fields = {} + for input_ in inputs: + field_name = input_.display_name.lower().replace(" ", "_") + description = input_.description + fields[field_name] = (str, Field(default="", description=description)) + return create_model(name, **fields) # type: ignore diff --git a/src/backend/base/langflow/helpers/record.py b/src/backend/base/langflow/helpers/record.py new file mode 100644 index 000000000..7c13a9ad4 --- /dev/null +++ b/src/backend/base/langflow/helpers/record.py @@ -0,0 +1,41 @@ +from typing import Union +from langchain_core.documents import Document + +from langflow.schema import Record + + +def docs_to_records(documents: list[Document]) -> list[Record]: + """ + Converts a list of Documents to a list of Records. + + Args: + documents (list[Document]): The list of Documents to convert. + + Returns: + list[Record]: The converted list of Records. + """ + return [Record.from_document(document) for document in documents] + + +def records_to_text(template: str, records: Union[Record, list[Record]]) -> str: + """ + Converts a list of Records to a list of texts. + + Args: + records (list[Record]): The list of Records to convert. + + Returns: + list[str]: The converted list of texts. 
+ """ + if isinstance(records, Record): + records = [records] + # Check if there are any format strings in the template + _records = [] + for record in records: + # If it is not a record, create one with the key "text" + if not isinstance(record, Record): + record = Record(text=record) + _records.append(record) + + formated_records = [template.format(data=record.data, **record.data) for record in _records] + return "\n".join(formated_records) diff --git a/src/backend/langflow/services/credentials/__init__.py b/src/backend/base/langflow/initial_setup/__init__.py similarity index 100% rename from src/backend/langflow/services/credentials/__init__.py rename to src/backend/base/langflow/initial_setup/__init__.py diff --git a/src/backend/base/langflow/initial_setup/setup.py b/src/backend/base/langflow/initial_setup/setup.py new file mode 100644 index 000000000..f2181f85e --- /dev/null +++ b/src/backend/base/langflow/initial_setup/setup.py @@ -0,0 +1,239 @@ +from collections import defaultdict +from copy import deepcopy +from datetime import datetime, timezone +from pathlib import Path + +import orjson +from emoji import demojize, purely_emoji # type: ignore +from loguru import logger +from sqlmodel import select + +from langflow.base.constants import FIELD_FORMAT_ATTRIBUTES, NODE_FORMAT_ATTRIBUTES +from langflow.interface.types import get_all_components +from langflow.services.database.models.flow.model import Flow, FlowCreate +from langflow.services.deps import get_settings_service, session_scope + +STARTER_FOLDER_NAME = "Starter Projects" + + +# In the folder ./starter_projects we have a few JSON files that represent +# starter projects. We want to load these into the database so that users +# can use them as a starting point for their own projects. 
+ + +def update_projects_components_with_latest_component_versions(project_data, all_types_dict): + # project data has a nodes key, which is a list of nodes + # we want to run through each node and see if it exists in the all_types_dict + # if so, we go into the template key and also get the template from all_types_dict + # and update it all + node_changes_log = defaultdict(list) + project_data_copy = deepcopy(project_data) + for node in project_data_copy.get("nodes", []): + node_data = node.get("data").get("node") + if node_data.get("display_name") in all_types_dict: + latest_node = all_types_dict.get(node_data.get("display_name")) + latest_template = latest_node.get("template") + node_data["template"]["code"] = latest_template["code"] + + for attr in NODE_FORMAT_ATTRIBUTES: + if attr in latest_node: + # Check if it needs to be updated + if latest_node[attr] != node_data.get(attr): + node_changes_log[node_data["display_name"]].append( + { + "attr": attr, + "old_value": node_data.get(attr), + "new_value": latest_node[attr], + } + ) + node_data[attr] = latest_node[attr] + + for field_name, field_dict in latest_template.items(): + if field_name not in node_data["template"]: + continue + # The idea here is to update some attributes of the field + for attr in FIELD_FORMAT_ATTRIBUTES: + if attr in field_dict and attr in node_data["template"].get(field_name): + # Check if it needs to be updated + if field_dict[attr] != node_data["template"][field_name][attr]: + node_changes_log[node_data["display_name"]].append( + { + "attr": f"{field_name}.{attr}", + "old_value": node_data["template"][field_name][attr], + "new_value": field_dict[attr], + } + ) + node_data["template"][field_name][attr] = field_dict[attr] + log_node_changes(node_changes_log) + return project_data_copy + + +def log_node_changes(node_changes_log): + # The idea here is to log the changes that were made to the nodes in debug + # Something like: + # Node: "Node Name" was updated with the following changes: + # 
attr_name: old_value -> new_value + # let's create one log per node + formatted_messages = [] + for node_name, changes in node_changes_log.items(): + message = f"\nNode: {node_name} was updated with the following changes:" + for change in changes: + message += f"\n- {change['attr']}: {change['old_value']} -> {change['new_value']}" + formatted_messages.append(message) + if formatted_messages: + logger.debug("\n".join(formatted_messages)) + + +def load_starter_projects(): + starter_projects = [] + folder = Path(__file__).parent / "starter_projects" + for file in folder.glob("*.json"): + project = orjson.loads(file.read_text(encoding="utf-8")) + starter_projects.append((file, project)) + logger.info(f"Loaded starter project {file}") + return starter_projects + + +def get_project_data(project): + project_name = project.get("name") + project_description = project.get("description") + project_is_component = project.get("is_component") + project_updated_at = project.get("updated_at") + if not project_updated_at: + project_updated_at = datetime.now(tz=timezone.utc).isoformat() + updated_at_datetime = datetime.strptime(project_updated_at, "%Y-%m-%dT%H:%M:%S.%f%z") + else: + updated_at_datetime = datetime.strptime(project_updated_at, "%Y-%m-%dT%H:%M:%S.%f") + project_data = project.get("data") + project_icon = project.get("icon") + if project_icon and purely_emoji(project_icon): + project_icon = demojize(project_icon) + else: + project_icon = "" + project_icon_bg_color = project.get("icon_bg_color") + return ( + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) + + +def update_project_file(project_path, project, updated_project_data): + project["data"] = updated_project_data + with open(project_path, "w", encoding="utf-8") as f: + f.write(orjson.dumps(project, option=orjson.OPT_INDENT_2).decode()) + logger.info(f"Updated starter project {project['name']} file") + + +def 
update_existing_project( + existing_project, + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, +): + logger.info(f"Updating starter project {project_name}") + existing_project.data = project_data + existing_project.folder = STARTER_FOLDER_NAME + existing_project.description = project_description + existing_project.is_component = project_is_component + existing_project.updated_at = updated_at_datetime + existing_project.icon = project_icon + existing_project.icon_bg_color = project_icon_bg_color + + +def create_new_project( + session, + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, +): + logger.debug(f"Creating starter project {project_name}") + new_project = FlowCreate( + name=project_name, + description=project_description, + icon=project_icon, + icon_bg_color=project_icon_bg_color, + data=project_data, + is_component=project_is_component, + updated_at=updated_at_datetime, + folder=STARTER_FOLDER_NAME, + ) + db_flow = Flow.model_validate(new_project, from_attributes=True) + session.add(db_flow) + + +def get_all_flows_similar_to_project(session, project_name): + flows = session.exec( + select(Flow).where( + Flow.name == project_name, + Flow.folder == STARTER_FOLDER_NAME, + ) + ).all() + return flows + + +def delete_start_projects(session): + flows = session.exec( + select(Flow).where( + Flow.folder == STARTER_FOLDER_NAME, + ) + ).all() + for flow in flows: + session.delete(flow) + session.commit() + + +def create_or_update_starter_projects(): + components_paths = get_settings_service().settings.COMPONENTS_PATH + try: + all_types_dict = get_all_components(components_paths, as_dict=True) + except Exception as e: + logger.exception(f"Error loading components: {e}") + raise e + with session_scope() as session: + starter_projects = load_starter_projects() + 
delete_start_projects(session) + for project_path, project in starter_projects: + ( + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) = get_project_data(project) + updated_project_data = update_projects_components_with_latest_component_versions( + project_data, all_types_dict + ) + if updated_project_data != project_data: + project_data = updated_project_data + # We also need to update the project data in the file + + update_project_file(project_path, project, updated_project_data) + if project_name and project_data: + for existing_project in get_all_flows_similar_to_project(session, project_name): + session.delete(existing_project) + + create_new_project( + session, + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json new file mode 100644 index 000000000..ca465fc23 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Basic Prompting (Hello, world!).json @@ -0,0 +1,888 @@ +{ + "id": "c091a57f-43a7-4a5e-b352-035ae8d8379c", + "data": { + "nodes": [ + { + "id": "Prompt-uxBqP", + "type": "genericNode", + "position": { + "x": 53.588791333410654, + "y": -107.07318910019967 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create 
a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: ", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "user_input": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "user_input", + "display_name": "user_input", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", 
+ "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "str", + "Text" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "user_input" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-uxBqP", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": true, + "width": 384, + "height": 383, + "dragging": false, + "positionAbsolute": { + "x": 53.588791333410654, + "y": -107.07318910019967 + } + }, + { + "id": "OpenAIModel-k39HS", + "type": "genericNode", + "position": { + "x": 634.8148772766217, + "y": 27.035057029045305 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n 
\"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return 
self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": true, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": 
"CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-k39HS", + "description": "Generates text using OpenAI LLMs.", + "display_name": "OpenAI" + }, + "selected": false, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 634.8148772766217, + "y": 27.035057029045305 + }, + "dragging": false + }, + { + "id": "ChatOutput-njtka", + "type": "genericNode", + "position": { + "x": 1193.250417197867, + "y": 71.88476890163852 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n 
sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + 
"advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "Record", + "Text", + "str", + "object" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-njtka" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 1193.250417197867, + "y": 71.88476890163852 + }, + "dragging": false + }, + { + "id": "ChatInput-P3fgL", + "type": "genericNode", + "position": { + "x": -495.2223093083827, + "y": -232.56998443685862 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "hi" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + 
"title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Get chat inputs from the Interaction Panel.", + "icon": "ChatInput", + "base_classes": [ + "object", + "Record", + "str", + "Text" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-P3fgL" + }, + "selected": false, + "width": 384, + "height": 375, + "positionAbsolute": { + "x": -495.2223093083827, + "y": -232.56998443685862 + }, + 
"dragging": false + } + ], + "edges": [ + { + "source": "OpenAIModel-k39HS", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-k39HSœ}", + "target": "ChatOutput-njtka", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-njtkaœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-njtka", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-k39HS" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-k39HS{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-k39HSœ}-ChatOutput-njtka{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-njtkaœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-uxBqP", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-uxBqPœ}", + "target": "OpenAIModel-k39HS", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-k39HSœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-k39HS", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": "Prompt", + "id": "Prompt-uxBqP" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-uxBqP{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-uxBqPœ}-OpenAIModel-k39HS{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-k39HSœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "ChatInput-P3fgL", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œRecordœ,œstrœ,œTextœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-P3fgLœ}", + "target": "Prompt-uxBqP", + 
"targetHandle": "{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-uxBqPœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "user_input", + "id": "Prompt-uxBqP", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Record", + "str", + "Text" + ], + "dataType": "ChatInput", + "id": "ChatInput-P3fgL" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-P3fgL{œbaseClassesœ:[œobjectœ,œRecordœ,œstrœ,œTextœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-P3fgLœ}-Prompt-uxBqP{œfieldNameœ:œuser_inputœ,œidœ:œPrompt-uxBqPœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": 260.58251815500563, + "y": 318.2261172111936, + "zoom": 0.43514115784696294 + } + }, + "description": "This flow will get you experimenting with the basics of the UI, the Chat and the Prompt component. \n\nTry changing the Template in it to see how the model behaves. 
\nYou can change it to this and a Text Input into the `type_of_person` variable : \"Answer the user as if you were a pirate.\n\nUser: {user_input}\n\nAnswer: \" ", + "name": "Basic Prompting (Hello, world!)", + "last_tested_version": "1.0.0a4", + "is_component": false +} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json new file mode 100644 index 000000000..9663a78ef --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Blog Writter.json @@ -0,0 +1,989 @@ +{ + "id": "6ad5559d-fb66-4fdc-8f98-96f4ac12799d", + "data": { + "nodes": [ + { + "id": "Prompt-Rse03", + "type": "genericNode", + "position": { + "x": 1331.381712783371, + "y": 535.0279854229713 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error 
formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Reference 1:\n\n{reference_1}\n\n---\n\nReference 2:\n\n{reference_2}\n\n---\n\n{instructions}\n\nBlog: \n\n\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "reference_1": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "reference_1", + "display_name": "reference_1", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "reference_2": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "reference_2", + "display_name": "reference_2", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "instructions": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + 
"name": "instructions", + "display_name": "instructions", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": ["object", "Text", "str"], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": ["reference_1", "reference_2", "instructions"] + }, + "output_types": ["Text"], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-Rse03", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 571, + "dragging": false, + "positionAbsolute": { + "x": 1331.381712783371, + "y": 535.0279854229713 + } + }, + { + "id": "URL-HYPkR", + "type": "genericNode", + "position": { + "x": 568.2971412887712, + "y": 700.9983368007821 + }, + "data": { + "type": "URL", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = 
self.to_records(docs)\n self.status = records\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "urls": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "urls", + "display_name": "URL", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": [ + "https://www.promptingguide.ai/techniques/prompt_chaining" + ] + }, + "_type": "CustomComponent" + }, + "description": "Fetch content from one or more URLs.", + "icon": "layout-template", + "base_classes": ["Record"], + "display_name": "URL", + "documentation": "", + "custom_fields": { + "urls": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "URL-HYPkR" + }, + "selected": false, + "width": 384, + "height": 281, + "positionAbsolute": { + "x": 568.2971412887712, + "y": 700.9983368007821 + }, + "dragging": false + }, + { + "id": "ChatOutput-JPlxl", + "type": "genericNode", + "position": { + "x": 2503.8617424688505, + "y": 789.3005578928434 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n 
input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + 
"show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": ["Text", "Record", "object", "str"], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null + }, + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-JPlxl" + }, + "selected": false, + "width": 384, + "height": 383 + }, + { + "id": "OpenAIModel-gi29P", + "type": "genericNode", + "position": { + "x": 1917.7089968570963, + "y": 575.9186499244129 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + 
"input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "1024", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, 
+ "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo-0125", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "0.1", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" + }, + 
"description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": ["str", "Text", "object"], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-gi29P" + }, + "selected": false, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 1917.7089968570963, + "y": 575.9186499244129 + }, + "dragging": false + }, + { + "id": "URL-2cX90", + "type": "genericNode", + "position": { + "x": 573.961301764604, + "y": 336.41463436122086 + }, + "data": { + "type": "URL", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict\n\nfrom langchain_community.document_loaders.web_base import WebBaseLoader\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass URLComponent(CustomComponent):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"urls\": {\"display_name\": \"URL\"},\n }\n\n def build(\n self,\n urls: list[str],\n ) -> list[Record]:\n loader = WebBaseLoader(web_paths=urls)\n docs = loader.load()\n records = self.to_records(docs)\n self.status = records\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + 
"load_from_db": false, + "title_case": false + }, + "urls": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "urls", + "display_name": "URL", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"], + "value": ["https://www.promptingguide.ai/introduction/basics"] + }, + "_type": "CustomComponent" + }, + "description": "Fetch content from one or more URLs.", + "icon": "layout-template", + "base_classes": ["Record"], + "display_name": "URL", + "documentation": "", + "custom_fields": { + "urls": null + }, + "output_types": ["Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "URL-2cX90" + }, + "selected": false, + "width": 384, + "height": 281, + "positionAbsolute": { + "x": 573.961301764604, + "y": 336.41463436122086 + }, + "dragging": false + }, + { + "id": "TextInput-og8Or", + "type": "genericNode", + "position": { + "x": 569.9387927203336, + "y": 1095.3352160671316 + }, + "data": { + "type": "TextInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as input.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Optional[str] = \"\",\n record_template: Optional[str] = \"\",\n ) -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Use the references above for style to write a new blog/tutorial about prompt engineering techniques. Suggest non-covered topics.", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": ["Record", "Text"], + "dynamic": false, + "info": "Text or Record to be passed as input.", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Get text inputs from the Interaction Panel.", + "icon": "type", + "base_classes": ["object", "Text", "str"], + "display_name": "Instructions", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": ["Text"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextInput-og8Or" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 569.9387927203336, + "y": 1095.3352160671316 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "URL-HYPkR", + "target": "Prompt-Rse03", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-HYPkRœ}", + "targetHandle": "{œfieldNameœ:œreference_2œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-URL-HYPkR{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-HYPkRœ}-Prompt-Rse03{œfieldNameœ:œreference_2œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "reference_2", + "id": "Prompt-Rse03", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "URL", + "id": "URL-HYPkR" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIModel-gi29P", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-gi29Pœ}", + "target": "ChatOutput-JPlxl", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-JPlxlœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + 
"fieldName": "input_value", + "id": "ChatOutput-JPlxl", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["str", "Text", "object"], + "dataType": "OpenAIModel", + "id": "OpenAIModel-gi29P" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-gi29P{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-gi29Pœ}-ChatOutput-JPlxl{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-JPlxlœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "URL-2cX90", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-2cX90œ}", + "target": "Prompt-Rse03", + "targetHandle": "{œfieldNameœ:œreference_1œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "reference_1", + "id": "Prompt-Rse03", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Record"], + "dataType": "URL", + "id": "URL-2cX90" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-URL-2cX90{œbaseClassesœ:[œRecordœ],œdataTypeœ:œURLœ,œidœ:œURL-2cX90œ}-Prompt-Rse03{œfieldNameœ:œreference_1œ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "TextInput-og8Or", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-og8Orœ}", + "target": "Prompt-Rse03", + "targetHandle": "{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "instructions", + "id": "Prompt-Rse03", + "inputTypes": ["Document", "BaseOutputParser", "Record", "Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + 
"dataType": "TextInput", + "id": "TextInput-og8Or" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-TextInput-og8Or{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-og8Orœ}-Prompt-Rse03{œfieldNameœ:œinstructionsœ,œidœ:œPrompt-Rse03œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-Rse03", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-Rse03œ}", + "target": "OpenAIModel-gi29P", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-gi29Pœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-gi29P", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["object", "Text", "str"], + "dataType": "Prompt", + "id": "Prompt-Rse03" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-Rse03{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-Rse03œ}-OpenAIModel-gi29P{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-gi29Pœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "selected": false + } + ], + "viewport": { + "x": -214.14726025721177, + "y": -35.83855793844168, + "zoom": 0.47344308394045925 + } + }, + "description": "This flow can be used to create a blog post following instructions from the user, using two other blogs as reference.", + "name": "Blog Writer", + "last_tested_version": "1.0.0a0", + "is_component": false +} diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json new file mode 100644 index 000000000..b8449f430 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Document QA.json @@ -0,0 +1,1031 @@ +{ + "id": 
"fecbce42-6f11-454c-8ab2-db6eddbbbb0f", + "data": { + "nodes": [ + { + "id": "Prompt-tHwPf", + "type": "genericNode", + "position": { + "x": 585.7906101139403, + "y": 117.52115876762832 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Answer user's questions based on the document below:\n\n---\n\n{Document}\n\n---\n\nQuestion:\n{Question}\n\nAnswer:\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": 
"Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "Document": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "Document", + "display_name": "Document", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "Question": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "Question", + "display_name": "Question", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "str", + "Text" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "Document", + "Question" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-tHwPf", + "description": "A component for creating prompt templates using dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 479, + "positionAbsolute": { + "x": 585.7906101139403, + "y": 117.52115876762832 + }, + "dragging": false + }, + { + "id": "File-6TEsD", + "type": "genericNode", + "position": { + 
"x": -18.636536329280602, + "y": 3.951948774836353 + }, + "data": { + "type": "File", + "node": { + "template": { + "path": { + "type": "file", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [ + ".txt", + ".md", + ".mdx", + ".csv", + ".json", + ".yaml", + ".yml", + ".xml", + ".html", + ".htm", + ".pdf", + ".docx" + ], + "password": false, + "name": "path", + "display_name": "Path", + "advanced": false, + "dynamic": false, + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"Files\"\n description = \"A generic file loader.\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "silent_errors": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "silent_errors", + "display_name": "Silent Errors", + "advanced": true, + "dynamic": false, + "info": "If true, errors will not raise an exception.", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "A generic file loader.", + "base_classes": [ + "Record" + ], + "display_name": "Files", + "documentation": "", + "custom_fields": { + "path": null, + "silent_errors": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "File-6TEsD" + }, + "selected": false, + "width": 384, + "height": 282, + "positionAbsolute": { + "x": -18.636536329280602, + "y": 3.951948774836353 + }, + "dragging": false + }, + { + "id": "ChatInput-MsSJ9", + "type": "genericNode", + "position": { + "x": -28.80036300619821, + "y": 379.81180230285355 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import 
ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + 
"multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Get chat inputs from the Interaction Panel.", + "icon": "ChatInput", + "base_classes": [ + "str", + "Record", + "Text", + "object" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-MsSJ9" + }, + "selected": true, + "width": 384, + "height": 377, + "positionAbsolute": { + "x": -28.80036300619821, + "y": 379.81180230285355 + }, + "dragging": false + }, + { + "id": "ChatOutput-F5Awj", + "type": "genericNode", + "position": { + "x": 1733.3012915204283, + "y": 
168.76098809939327 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and 
session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "str", + "Record", + "Text", + "object" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-F5Awj" + }, + "selected": false, + "width": 384, + "height": 385, + "positionAbsolute": { + "x": 
1733.3012915204283, + "y": 168.76098809939327 + }, + "dragging": false + }, + { + "id": "OpenAIModel-Bt067", + "type": "genericNode", + "position": { + "x": 1137.6078582863759, + "y": -14.41920034020356 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n 
\"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": 
"NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-turbo-preview", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": false, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": 
"CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "object", + "str", + "Text" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-Bt067" + }, + "selected": false, + "width": 384, + "height": 642, + "positionAbsolute": { + "x": 1137.6078582863759, + "y": -14.41920034020356 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "ChatInput-MsSJ9", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œRecordœ,œTextœ,œobjectœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-MsSJ9œ}", + "target": "Prompt-tHwPf", + "targetHandle": "{œfieldNameœ:œQuestionœ,œidœ:œPrompt-tHwPfœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "Question", + "id": "Prompt-tHwPf", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Record", + "Text", + "object" + ], + "dataType": "ChatInput", + "id": "ChatInput-MsSJ9" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-MsSJ9{œbaseClassesœ:[œstrœ,œRecordœ,œTextœ,œobjectœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-MsSJ9œ}-Prompt-tHwPf{œfieldNameœ:œQuestionœ,œidœ:œPrompt-tHwPfœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "File-6TEsD", + "sourceHandle": 
"{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-6TEsDœ}", + "target": "Prompt-tHwPf", + "targetHandle": "{œfieldNameœ:œDocumentœ,œidœ:œPrompt-tHwPfœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "Document", + "id": "Prompt-tHwPf", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "File", + "id": "File-6TEsD" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-File-6TEsD{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-6TEsDœ}-Prompt-tHwPf{œfieldNameœ:œDocumentœ,œidœ:œPrompt-tHwPfœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-tHwPf", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-tHwPfœ}", + "target": "OpenAIModel-Bt067", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Bt067œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-Bt067", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": "Prompt", + "id": "Prompt-tHwPf" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-tHwPf{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-tHwPfœ}-OpenAIModel-Bt067{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Bt067œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "OpenAIModel-Bt067", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Bt067œ}", + "target": "ChatOutput-F5Awj", + "targetHandle": 
"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-F5Awjœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-F5Awj", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-Bt067" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-Bt067{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Bt067œ}-ChatOutput-F5Awj{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-F5Awjœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": 352.20899206064655, + "y": 56.054900898593075, + "zoom": 0.9023391400011 + } + }, + "description": "This flow integrates PDF reading with a language model to answer document-specific questions. Ideal for small-scale texts, it facilitates direct queries with immediate insights.", + "name": "Document QA", + "last_tested_version": "1.0.0a0", + "is_component": false +} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json new file mode 100644 index 000000000..92dbdd6f2 --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Memory Conversation.json @@ -0,0 +1,1274 @@ +{ + "id": "08d5cccf-d098-4367-b14b-1078429c9ed9", + "icon": "🤖", + "icon_bg_color": "#FFD700", + "data": { + "nodes": [ + { + "id": "ChatInput-t7F8v", + "type": "genericNode", + "position": { + "x": 1283.2700598313072, + "y": 982.5953650473145 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom 
langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + 
"list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": false, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "MySessionID" + }, + "_type": "CustomComponent" + }, + "description": "Get chat inputs from the Interaction Panel.", + "icon": "ChatInput", + "base_classes": [ + "Text", + "object", + "Record", + "str" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-t7F8v" + }, + "selected": false, + "width": 384, + "height": 469, + "positionAbsolute": { + "x": 1283.2700598313072, + "y": 982.5953650473145 + }, + "dragging": false + }, + { + "id": "ChatOutput-P1jEe", + "type": "genericNode", 
+ "position": { + "x": 3154.916355514023, + "y": 851.051882666333 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a 
record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": false, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "MySessionID" + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "Text", + "object", + "Record", + "str" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-P1jEe" + }, + "selected": 
false, + "width": 384, + "height": 477, + "dragging": false, + "positionAbsolute": { + "x": 3154.916355514023, + "y": 851.051882666333 + } + }, + { + "id": "MemoryComponent-cdA1J", + "type": "genericNode", + "position": { + "x": 1289.9606870058817, + "y": 442.16804561053766 + }, + "data": { + "type": "MemoryComponent", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.field_typing import Text\nfrom langflow.helpers.record import records_to_text\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.memory import get_messages\n\n\nclass MemoryComponent(CustomComponent):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages given a specific Session ID.\"\n beta: bool = True\n icon = \"history\"\n\n def build_config(self):\n return {\n \"sender\": {\n \"options\": [\"Machine\", \"User\", \"Machine and User\"],\n \"display_name\": \"Sender Type\",\n },\n \"sender_name\": {\"display_name\": \"Sender Name\", \"advanced\": True},\n \"n_messages\": {\n \"display_name\": \"Number of Messages\",\n \"info\": \"Number of messages to retrieve.\",\n },\n \"session_id\": {\n \"display_name\": \"Session ID\",\n \"info\": \"Session ID of the chat history.\",\n \"input_types\": [\"Text\"],\n },\n \"order\": {\n \"options\": [\"Ascending\", \"Descending\"],\n \"display_name\": \"Order\",\n \"info\": \"Order of the messages.\",\n \"advanced\": True,\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n sender: Optional[str] = \"Machine and User\",\n sender_name: Optional[str] = None,\n session_id: Optional[str] = None,\n n_messages: int = 5,\n order: Optional[str] = \"Descending\",\n record_template: Optional[str] = \"{sender_name}: {text}\",\n ) -> Text:\n order = \"DESC\" if order == \"Descending\" else \"ASC\"\n if sender == \"Machine and User\":\n sender = None\n messages = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n messages_str = records_to_text(template=record_template, records=messages)\n self.status = messages_str\n return messages_str\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "n_messages": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 5, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "n_messages", + "display_name": "Number of Messages", + "advanced": false, + "dynamic": false, + "info": "Number of messages to retrieve.", + "load_from_db": false, + "title_case": false + }, + "order": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Descending", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Ascending", + "Descending" + ], + "name": "order", + "display_name": "Order", + "advanced": true, + "dynamic": false, + "info": "Order of the messages.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{sender_name}: {text}", + 
"fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine and User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User", + "Machine and User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "Session ID of the chat history.", + "load_from_db": false, + "title_case": false, + "value": "MySessionID" + }, + "_type": "CustomComponent" + }, + "description": "Retrieves stored chat messages given a specific Session ID.", + "icon": "history", + "base_classes": [ + "str", + "Text", + "object" + ], + "display_name": "Chat Memory", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "session_id": 
null, + "n_messages": null, + "order": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": true + }, + "id": "MemoryComponent-cdA1J", + "description": "Retrieves stored chat messages given a specific Session ID.", + "display_name": "Chat Memory" + }, + "selected": false, + "width": 384, + "height": 489, + "dragging": false, + "positionAbsolute": { + "x": 1289.9606870058817, + "y": 442.16804561053766 + } + }, + { + "id": "Prompt-ODkUx", + "type": "genericNode", + "position": { + "x": 1894.594426342426, + "y": 753.3797365481901 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + 
"dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{context}\n\nUser: {user_message}\nAI: ", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "context": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "context", + "display_name": "context", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "user_message": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "user_message", + "display_name": "user_message", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "Text", + "str", + "object" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "context", + "user_message" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": 
null + }, + "id": "Prompt-ODkUx", + "description": "A component for creating prompt templates using dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 477, + "dragging": false, + "positionAbsolute": { + "x": 1894.594426342426, + "y": 753.3797365481901 + } + }, + { + "id": "OpenAIModel-9RykF", + "type": "genericNode", + "position": { + "x": 2561.5850334731617, + "y": 553.2745131130916 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n 
\"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-1106-preview", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "0.2", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": 
"CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "str", + "object", + "Text" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-9RykF" + }, + "selected": false, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 2561.5850334731617, + "y": 553.2745131130916 + }, + "dragging": false + }, + { + "id": "TextOutput-vrs6T", + "type": "genericNode", + "position": { + "x": 1911.4785906252087, + "y": 247.39079954376987 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": [ + "Record", + "Text" + ], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n 
return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a text output in the Interaction Panel.", + "icon": "type", + "base_classes": [ + "str", + "object", + "Text" + ], + "display_name": "Inspect Memory", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-vrs6T" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 1911.4785906252087, + "y": 247.39079954376987 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "MemoryComponent-cdA1J", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œMemoryComponentœ,œidœ:œMemoryComponent-cdA1Jœ}", + "target": "Prompt-ODkUx", + "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-ODkUxœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "context", + "type": "str", + "id": "Prompt-ODkUx", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ] + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Text", + "object" + ], + "dataType": "MemoryComponent", + "id": "MemoryComponent-cdA1J" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-MemoryComponent-cdA1J{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œMemoryComponentœ,œidœ:œMemoryComponent-cdA1Jœ}-Prompt-ODkUx{œfieldNameœ:œcontextœ,œidœ:œPrompt-ODkUxœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "selected": false + }, + { + "source": "ChatInput-t7F8v", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œobjectœ,œRecordœ,œstrœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-t7F8vœ}", + 
"target": "Prompt-ODkUx", + "targetHandle": "{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-ODkUxœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "user_message", + "type": "str", + "id": "Prompt-ODkUx", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ] + }, + "sourceHandle": { + "baseClasses": [ + "Text", + "object", + "Record", + "str" + ], + "dataType": "ChatInput", + "id": "ChatInput-t7F8v" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-t7F8v{œbaseClassesœ:[œTextœ,œobjectœ,œRecordœ,œstrœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-t7F8vœ}-Prompt-ODkUx{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-ODkUxœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "selected": false + }, + { + "source": "Prompt-ODkUx", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-ODkUxœ}", + "target": "OpenAIModel-9RykF", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-9RykFœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-9RykF", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Text", + "str", + "object" + ], + "dataType": "Prompt", + "id": "Prompt-ODkUx" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-ODkUx{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-ODkUxœ}-OpenAIModel-9RykF{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-9RykFœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "OpenAIModel-9RykF", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œobjectœ,œTextœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-9RykFœ}", + "target": "ChatOutput-P1jEe", + "targetHandle": 
"{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-P1jEeœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-P1jEe", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "object", + "Text" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-9RykF" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-9RykF{œbaseClassesœ:[œstrœ,œobjectœ,œTextœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-9RykFœ}-ChatOutput-P1jEe{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-P1jEeœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "MemoryComponent-cdA1J", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œMemoryComponentœ,œidœ:œMemoryComponent-cdA1Jœ}", + "target": "TextOutput-vrs6T", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-vrs6Tœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-vrs6T", + "inputTypes": [ + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Text", + "object" + ], + "dataType": "MemoryComponent", + "id": "MemoryComponent-cdA1J" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-MemoryComponent-cdA1J{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œMemoryComponentœ,œidœ:œMemoryComponent-cdA1Jœ}-TextOutput-vrs6T{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-vrs6Tœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": -569.862554459756, + "y": -42.08339711050985, + "zoom": 0.4868590524514978 + } +}, + "description": "This project can be used as a starting point for building a Chat experience with user specific memory. 
You can set a different Session ID to start a new message history.", + "name": "Memory Chatbot", + "last_tested_version": "1.0.0a0", + "is_component": false +} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json new file mode 100644 index 000000000..43e8d729c --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/Langflow Prompt Chaining.json @@ -0,0 +1,1773 @@ +{ + "id": "85392e54-20f3-4ab5-a179-cb4bef16f639", + "data": { + "nodes": [ + { + "id": "Prompt-amqBu", + "type": "genericNode", + "position": { + "x": 2191.5837146441663, + "y": 1047.9307944451873 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return 
formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "You are a helpful assistant. Given a long document, your task is to create a concise summary that captures the main points and key details. The summary should be clear, accurate, and succinct. Please provide the summary in the format below:\n####\n{document}\n####\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "document": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "document", + "display_name": "document", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "str", + "Text" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "document" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-amqBu", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + 
"selected": false, + "width": 384, + "height": 385, + "positionAbsolute": { + "x": 2191.5837146441663, + "y": 1047.9307944451873 + }, + "dragging": false + }, + { + "id": "Prompt-gTNiz", + "type": "genericNode", + "position": { + "x": 3731.0813766902447, + "y": 799.631909121391 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: \"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Given a summary of an article, please create two multiple-choice questions that cover the key points and details mentioned. 
Ensure the questions are clear and provide three options (A, B, C), with one correct answer.\n####\n{summary}\n####", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "summary": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "summary", + "display_name": "summary", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "str", + "Text" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "summary" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-gTNiz", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 385, + "dragging": false + }, + { + "id": "ChatOutput-EJkG3", + "type": "genericNode", + "position": { + "x": 3722.1747844849388, + "y": 1283.413553222214 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom 
langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": 
false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Summarizer", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "object", + "Record", + "Text", + "str" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null + }, + "output_types": [ + "Text", + 
"Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-EJkG3" + }, + "selected": false, + "width": 384, + "height": 385, + "dragging": false + }, + { + "id": "ChatOutput-DNmvg", + "type": "genericNode", + "position": { + "x": 5077.71285886074, + "y": 1232.9152769735522 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": 
"", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Question Generator", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session 
ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "object", + "Record", + "Text", + "str" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-DNmvg" + }, + "selected": false, + "width": 384, + "height": 385 + }, + { + "id": "TextInput-sptaH", + "type": "genericNode", + "position": { + "x": 1700.5624822024752, + "y": 1039.603088937466 + }, + "data": { + "type": "TextInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextInput(TextComponent):\n display_name = \"Text Input\"\n description = \"Get text inputs from the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as input.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Optional[str] = \"\",\n record_template: Optional[str] = \"\",\n ) -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "Revolutionary Nano-Battery Technology Unveiled In a groundbreaking announcement yesterday, researchers from the fictional Tech Innovations Institute revealed the development of a new nano-battery technology that promises to revolutionize energy storage. The new battery, dubbed the \"EnerGCell\", uses advanced nanomaterials to achieve unprecedented efficiency and storage capacities. According to lead researcher Dr. Ada Byron, the EnerGCell can store up to ten times more energy than the best lithium-ion batteries available today, while charging in just a fraction of the time. \"We're talking about charging your electric vehicle in just five minutes for a range of over 1,000 miles,\" Dr. Byron stated during the press conference. The technology behind the EnerGCell involves a complex arrangement of nanostructured electrodes that allow for rapid ion transfer and extremely high energy density. This breakthrough was achieved after a decade of research into nanomaterials and their applications in energy storage. The implications of this technology are vast, promising to accelerate the adoption of renewable energy by making it more practical and affordable to store wind and solar power. It could also lead to significant advancements in electric vehicles, mobile devices, and any other technology that relies on batteries. 
Despite the excitement, some experts are calling for patience, noting that the EnerGCell is still in its early stages of development and may take several years before it's commercially available. However, the potential impact of such a technology on the environment and the global economy is undeniable. Tech Innovations Institute plans to continue refining the EnerGCell and begin pilot projects with select partners in the coming year. If successful, this nano-battery technology could indeed be the breakthrough needed to usher in a new era of clean energy and technology.", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": [ + "Record", + "Text" + ], + "dynamic": false, + "info": "Text or Record to be passed as input.", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Get text inputs from the Interaction Panel.", + "icon": "type", + "base_classes": [ + "str", + "Text", + "object" + ], + "display_name": "Text Input", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextInput-sptaH" + }, + "selected": false, + "width": 384, + "height": 290, + "positionAbsolute": { + "x": 1700.5624822024752, + "y": 1039.603088937466 + }, + "dragging": false + }, + { + "id": "TextOutput-2MS4a", + "type": "genericNode", + "position": { + "x": 2917.216113690115, + "y": 513.0058511435552 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": [ + "Record", + "Text" + ], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed 
as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a text output in the Interaction Panel.", + "icon": "type", + "base_classes": [ + "str", + "Text", + "object" + ], + "display_name": "First Prompt", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-2MS4a" + }, + "selected": false, + "width": 384, + "height": 290, + "positionAbsolute": { + "x": 2917.216113690115, + "y": 513.0058511435552 + }, + "dragging": false + }, + { + "id": "OpenAIModel-uYXZJ", + "type": "genericNode", + "position": { + "x": 2925.784767523062, + "y": 933.6465680967775 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + 
"required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + 
"value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-turbo-preview", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": 
"CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "str", + "Text", + "object" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-uYXZJ" + }, + "selected": false, + "width": 384, + "height": 565, + "positionAbsolute": { + "x": 2925.784767523062, + "y": 933.6465680967775 + }, + "dragging": false + }, + { + "id": "TextOutput-MUDOR", + "type": "genericNode", + "position": { + "x": 4446.064323520379, + "y": 633.833297518702 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": [ + "Record", + "Text" + ], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n 
return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a text output in the Interaction Panel.", + "icon": "type", + "base_classes": [ + "str", + "Text", + "object" + ], + "display_name": "Second Prompt", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-MUDOR" + }, + "selected": false, + "width": 384, + "height": 290, + "dragging": false, + "positionAbsolute": { + "x": 4446.064323520379, + "y": 633.833297518702 + } + }, + { + "id": "OpenAIModel-XawYB", + "type": "genericNode", + "position": { + "x": 4500.152018344182, + "y": 1027.7382026227656 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n 
\"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n 
api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-turbo-preview", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL 
of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. 
Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "str", + "Text", + "object" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-XawYB" + }, + "selected": false, + "width": 384, + "height": 565, + "positionAbsolute": { + "x": 4500.152018344182, + "y": 1027.7382026227656 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "TextInput-sptaH", + "sourceHandle": 
"{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-sptaHœ}", + "target": "Prompt-amqBu", + "targetHandle": "{œfieldNameœ:œdocumentœ,œidœ:œPrompt-amqBuœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "document", + "id": "Prompt-amqBu", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Text", + "object" + ], + "dataType": "TextInput", + "id": "TextInput-sptaH" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-TextInput-sptaH{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œTextInputœ,œidœ:œTextInput-sptaHœ}-Prompt-amqBu{œfieldNameœ:œdocumentœ,œidœ:œPrompt-amqBuœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-amqBu", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-amqBuœ}", + "target": "TextOutput-2MS4a", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-2MS4aœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-2MS4a", + "inputTypes": [ + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": "Prompt", + "id": "Prompt-amqBu" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-amqBu{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-amqBuœ}-TextOutput-2MS4a{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-2MS4aœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-amqBu", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-amqBuœ}", + "target": "OpenAIModel-uYXZJ", + 
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-uYXZJœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-uYXZJ", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": "Prompt", + "id": "Prompt-amqBu" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-amqBu{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-amqBuœ}-OpenAIModel-uYXZJ{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-uYXZJœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "OpenAIModel-uYXZJ", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-uYXZJœ}", + "target": "Prompt-gTNiz", + "targetHandle": "{œfieldNameœ:œsummaryœ,œidœ:œPrompt-gTNizœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "summary", + "id": "Prompt-gTNiz", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Text", + "object" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-uYXZJ" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-uYXZJ{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-uYXZJœ}-Prompt-gTNiz{œfieldNameœ:œsummaryœ,œidœ:œPrompt-gTNizœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "OpenAIModel-uYXZJ", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-uYXZJœ}", + "target": "ChatOutput-EJkG3", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-EJkG3œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + 
"targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-EJkG3", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Text", + "object" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-uYXZJ" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-uYXZJ{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-uYXZJœ}-ChatOutput-EJkG3{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-EJkG3œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-gTNiz", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-gTNizœ}", + "target": "TextOutput-MUDOR", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-MUDORœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-MUDOR", + "inputTypes": [ + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": "Prompt", + "id": "Prompt-gTNiz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-gTNiz{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-gTNizœ}-TextOutput-MUDOR{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-MUDORœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "Prompt-gTNiz", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-gTNizœ}", + "target": "OpenAIModel-XawYB", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-XawYBœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-XawYB", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "str", + "Text" + ], + "dataType": 
"Prompt", + "id": "Prompt-gTNiz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-Prompt-gTNiz{œbaseClassesœ:[œobjectœ,œstrœ,œTextœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-gTNizœ}-OpenAIModel-XawYB{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-XawYBœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "OpenAIModel-XawYB", + "sourceHandle": "{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-XawYBœ}", + "target": "ChatOutput-DNmvg", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-DNmvgœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-DNmvg", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "str", + "Text", + "object" + ], + "dataType": "OpenAIModel", + "id": "OpenAIModel-XawYB" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIModel-XawYB{œbaseClassesœ:[œstrœ,œTextœ,œobjectœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-XawYBœ}-ChatOutput-DNmvg{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-DNmvgœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": -383.7251879618552, + "y": 69.19813933800037, + "zoom": 0.3105753483695743 + } + }, + "description": "The Prompt Chaining flow chains prompts with LLMs, refining outputs through iterative stages.", + "name": "Prompt Chaining", + "last_tested_version": "1.0.0a0", + "is_component": false +} \ No newline at end of file diff --git a/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json new file mode 100644 index 000000000..5706a0fbf --- /dev/null +++ b/src/backend/base/langflow/initial_setup/starter_projects/VectorStore-RAG-Flows.json @@ -0,0 +1,3403 @@ +{ + "id": 
"51e2b78a-199b-4054-9f32-e288eef6924c", + "data": { + "nodes": [ + { + "id": "ChatInput-yxMKE", + "type": "genericNode", + "position": { + "x": 1195.5276981160775, + "y": 209.421875 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Interaction Panel.\"\n icon = \"ChatInput\"\n\n def build_config(self):\n build_config = super().build_config()\n build_config[\"input_value\"] = {\n \"input_types\": [],\n \"display_name\": \"Message\",\n \"multiline\": True,\n }\n\n return build_config\n\n def build(\n self,\n sender: Optional[str] = \"User\",\n sender_name: Optional[str] = \"User\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "value": "what is a line" + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, 
+ "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Get chat inputs from the Interaction Panel.", + "icon": "ChatInput", + "base_classes": [ + "Text", + "str", + "object", + "Record" + ], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": 
null, + "return_record": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-yxMKE" + }, + "selected": false, + "width": 384, + "height": 383 + }, + { + "id": "TextOutput-BDknO", + "type": "genericNode", + "position": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "data": { + "type": "TextOutput", + "node": { + "template": { + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Value", + "advanced": false, + "input_types": [ + "Record", + "Text" + ], + "dynamic": false, + "info": "Text or Record to be passed as output.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langflow.base.io.text import TextComponent\nfrom langflow.field_typing import Text\n\n\nclass TextOutput(TextComponent):\n display_name = \"Text Output\"\n description = \"Display a text output in the Interaction Panel.\"\n icon = \"type\"\n\n def build_config(self):\n return {\n \"input_value\": {\n \"display_name\": \"Value\",\n \"input_types\": [\"Record\", \"Text\"],\n \"info\": \"Text or Record to be passed as output.\",\n },\n \"record_template\": {\n \"display_name\": \"Record Template\",\n \"multiline\": True,\n \"info\": \"Template to convert Record to Text. 
If left empty, it will be dynamically set to the Record's text key.\",\n \"advanced\": True,\n },\n }\n\n def build(self, input_value: Optional[Text] = \"\", record_template: str = \"\") -> Text:\n return super().build(input_value=input_value, record_template=record_template)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "Template to convert Record to Text. If left empty, it will be dynamically set to the Record's text key.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a text output in the Interaction Panel.", + "icon": "type", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "Extracted Chunks", + "documentation": "", + "custom_fields": { + "input_value": null, + "record_template": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "TextOutput-BDknO" + }, + "selected": false, + "width": 384, + "height": 289, + "positionAbsolute": { + "x": 2322.600672827879, + "y": 604.9467307442569 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-ZlOk1", + "type": "genericNode", + "position": { + "x": 1183.667250865064, + "y": 687.3171828430261 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": 
True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n \"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 
1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + 
"load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [ + "all" + ], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": 
false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": 
"openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": 
false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Generate embeddings using OpenAI models.", + "base_classes": [ + "Embeddings" + ], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + 
"openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": [ + "Embeddings" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-ZlOk1" + }, + "selected": false, + "width": 384, + "height": 383, + "dragging": false + }, + { + "id": "OpenAIModel-EjXlN", + "type": "genericNode", + "position": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "data": { + "type": "OpenAIModel", + "node": { + "template": { + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain_openai import ChatOpenAI\n\nfrom langflow.base.constants import STREAM_INFO_TEXT\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import NestedDict, Text\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n\n field_order = [\n \"max_tokens\",\n \"model_kwargs\",\n \"model_name\",\n \"openai_api_base\",\n \"openai_api_key\",\n \"temperature\",\n \"input_value\",\n \"system_message\",\n \"stream\",\n ]\n\n def build_config(self):\n return {\n \"input_value\": {\"display_name\": \"Input\"},\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": True,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model 
Kwargs\",\n \"advanced\": True,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-3.5-turbo\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n \"value\": \"gpt-4-turbo-preview\",\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": True,\n \"info\": (\n \"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"info\": \"The OpenAI API Key to use for the OpenAI model.\",\n \"advanced\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"value\": 0.1,\n },\n \"stream\": {\n \"display_name\": \"Stream\",\n \"info\": STREAM_INFO_TEXT,\n \"advanced\": True,\n },\n \"system_message\": {\n \"display_name\": \"System Message\",\n \"info\": \"System message to pass to the model.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n input_value: Text,\n openai_api_key: str,\n temperature: float,\n model_name: str,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n stream: bool = False,\n system_message: Optional[str] = None,\n ) -> Text:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n output = ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n\n return self.get_chat_result(output, stream, input_value, system_message)\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + 
"max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-3.5-turbo", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "The OpenAI API Key to use for the OpenAI model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "stream": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "stream", + "display_name": "Stream", + "advanced": true, + "dynamic": false, + "info": "Stream the response from the model. Streaming works only in Chat.", + "load_from_db": false, + "title_case": false + }, + "system_message": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "system_message", + "display_name": "System Message", + "advanced": true, + "dynamic": false, + "info": "System message to pass to the model.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "temperature": { + "type": "float", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.1, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "_type": 
"CustomComponent" + }, + "description": "Generates text using OpenAI LLMs.", + "icon": "OpenAI", + "base_classes": [ + "object", + "Text", + "str" + ], + "display_name": "OpenAI", + "documentation": "", + "custom_fields": { + "input_value": null, + "openai_api_key": null, + "temperature": null, + "model_name": null, + "max_tokens": null, + "model_kwargs": null, + "openai_api_base": null, + "stream": null, + "system_message": null + }, + "output_types": [ + "Text" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "max_tokens", + "model_kwargs", + "model_name", + "openai_api_base", + "openai_api_key", + "temperature", + "input_value", + "system_message", + "stream" + ], + "beta": false + }, + "id": "OpenAIModel-EjXlN" + }, + "selected": true, + "width": 384, + "height": 563, + "positionAbsolute": { + "x": 3410.117202077183, + "y": 431.2038048137648 + }, + "dragging": false + }, + { + "id": "Prompt-xeI6K", + "type": "genericNode", + "position": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "data": { + "type": "Prompt", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from langchain_core.prompts import PromptTemplate\n\nfrom langflow.field_typing import Prompt, TemplateField, Text\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass PromptComponent(CustomComponent):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n\n def build_config(self):\n return {\n \"template\": TemplateField(display_name=\"Template\"),\n \"code\": TemplateField(advanced=True),\n }\n\n def build(\n self,\n template: Prompt,\n **kwargs,\n ) -> Text:\n from langflow.base.prompts.utils import dict_values_to_string\n\n prompt_template = PromptTemplate.from_template(Text(template))\n kwargs = dict_values_to_string(kwargs)\n kwargs = {k: 
\"\\n\".join(v) if isinstance(v, list) else v for k, v in kwargs.items()}\n try:\n formated_prompt = prompt_template.format(**kwargs)\n except Exception as exc:\n raise ValueError(f\"Error formatting prompt: {exc}\") from exc\n self.status = f'Prompt:\\n\"{formated_prompt}\"'\n return formated_prompt\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "template": { + "type": "prompt", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "template", + "display_name": "Template", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent", + "context": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "context", + "display_name": "context", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + }, + "question": { + "field_type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "question", + "display_name": "question", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "type": "str" + } + }, + "description": "Create a 
prompt template with dynamic variables.", + "icon": "prompts", + "is_input": null, + "is_output": null, + "is_composition": null, + "base_classes": [ + "object", + "Text", + "str" + ], + "name": "", + "display_name": "Prompt", + "documentation": "", + "custom_fields": { + "template": [ + "context", + "question" + ] + }, + "output_types": [ + "Text" + ], + "full_path": null, + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false, + "error": null + }, + "id": "Prompt-xeI6K", + "description": "Create a prompt template with dynamic variables.", + "display_name": "Prompt" + }, + "selected": false, + "width": 384, + "height": 477, + "positionAbsolute": { + "x": 2969.0261961391298, + "y": 442.1613649809069 + }, + "dragging": false + }, + { + "id": "ChatOutput-Q39I8", + "type": "genericNode", + "position": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Interaction Panel.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n record_template: Optional[str] = \"{text}\",\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n record_template=record_template,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": 
true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "record_template": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "{text}", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "record_template", + "display_name": "Record Template", + "advanced": true, + "dynamic": false, + "info": "In case of Message being a Record, this template will be used to convert it to text.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": true, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Machine", + "User" + ], + "name": "sender", + "display_name": "Sender Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + 
"multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Display a chat message in the Interaction Panel.", + "icon": "ChatOutput", + "base_classes": [ + "object", + "Text", + "Record", + "str" + ], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null, + "record_template": null + }, + "output_types": [ + "Text", + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-Q39I8" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 3887.2073667611485, + "y": 588.4801225794856 + }, + "dragging": false + }, + { + "id": "File-t0a6a", + "type": "genericNode", + "position": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "data": { + "type": "File", + "node": { + "template": { + "path": { + "type": "file", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [ + ".txt", + ".md", + ".mdx", + ".csv", + ".json", + ".yaml", + ".yml", + ".xml", + ".html", + ".htm", + ".pdf", + ".docx" + ], + "file_path": "51e2b78a-199b-4054-9f32-e288eef6924c/Langflow 
conversation.pdf", + "password": false, + "name": "path", + "display_name": "Path", + "advanced": false, + "dynamic": false, + "info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx", + "load_from_db": false, + "title_case": false, + "value": "" + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from pathlib import Path\nfrom typing import Any, Dict\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_record\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\n\n\nclass FileComponent(CustomComponent):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n\n def build_config(self) -> Dict[str, Any]:\n return {\n \"path\": {\n \"display_name\": \"Path\",\n \"field_type\": \"file\",\n \"file_types\": TEXT_FILE_TYPES,\n \"info\": f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n },\n \"silent_errors\": {\n \"display_name\": \"Silent Errors\",\n \"advanced\": True,\n \"info\": \"If true, errors will not raise an exception.\",\n },\n }\n\n def load_file(self, path: str, silent_errors: bool = False) -> Record:\n resolved_path = self.resolve_path(path)\n path_obj = Path(resolved_path)\n extension = path_obj.suffix[1:].lower()\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. 
Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n record = parse_text_file_to_record(resolved_path, silent_errors)\n self.status = record if record else \"No data\"\n return record or Record()\n\n def build(\n self,\n path: str,\n silent_errors: bool = False,\n ) -> Record:\n record = self.load_file(path, silent_errors)\n self.status = record\n return record\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "silent_errors": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "silent_errors", + "display_name": "Silent Errors", + "advanced": true, + "dynamic": false, + "info": "If true, errors will not raise an exception.", + "load_from_db": false, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "A generic file loader.", + "icon": "file-text", + "base_classes": [ + "Record" + ], + "display_name": "File", + "documentation": "", + "custom_fields": { + "path": null, + "silent_errors": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "File-t0a6a" + }, + "selected": false, + "width": 384, + "height": 281, + "positionAbsolute": { + "x": 2257.233450682836, + "y": 1747.5389618367233 + }, + "dragging": false + }, + { + "id": "RecursiveCharacterTextSplitter-tR9QM", + "type": "genericNode", + "position": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "data": { + "type": "RecursiveCharacterTextSplitter", + "node": { + "template": { + "inputs": { + "type": "Document", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + 
"file_path": "", + "password": false, + "name": "inputs", + "display_name": "Input", + "advanced": false, + "input_types": [ + "Document", + "Record" + ], + "dynamic": false, + "info": "The texts to split.", + "load_from_db": false, + "title_case": false + }, + "chunk_overlap": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 200, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "The amount of overlap between chunks.", + "load_from_db": false, + "title_case": false + }, + "chunk_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "The maximum length of each chunk.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional\n\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\nfrom langchain_core.documents import Document\n\nfrom langflow.interface.custom.custom_component import CustomComponent\nfrom langflow.schema import Record\nfrom langflow.utils.util import build_loader_repr_from_records, unescape_string\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\"\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Input\",\n \"info\": \"The texts to split.\",\n 
\"input_types\": [\"Document\", \"Record\"],\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n \"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n inputs: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Record]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [unescape_string(x) for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n documents = []\n for _input in inputs:\n if isinstance(_input, Record):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n docs = splitter.split_documents(documents)\n records = self.to_records(docs)\n self.repr_value = 
build_loader_repr_from_records(records)\n return records\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "separators": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "separators", + "display_name": "Separators", + "advanced": false, + "dynamic": false, + "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": [ + "" + ] + }, + "_type": "CustomComponent" + }, + "description": "Split text into chunks of a specified length.", + "base_classes": [ + "Record" + ], + "display_name": "Recursive Character Text Splitter", + "documentation": "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter", + "custom_fields": { + "inputs": null, + "separators": null, + "chunk_size": null, + "chunk_overlap": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "RecursiveCharacterTextSplitter-tR9QM" + }, + "selected": false, + "width": 384, + "height": 501, + "positionAbsolute": { + "x": 2791.013514133929, + "y": 1462.9588953494142 + }, + "dragging": false + }, + { + "id": "AstraDBSearch-41nRz", + "type": "genericNode", + "position": { + "x": 1723.976434815103, + "y": 277.03317407245913 + }, + "data": { + "type": "AstraDBSearch", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to 
use", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Input Value", + "advanced": false, + "dynamic": false, + "info": "Input value to search", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + "dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": 
"bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langflow.components.vectorstores.AstraDB import AstraDBVectorStoreComponent\nfrom langflow.components.vectorstores.base.model import LCVectorStoreComponent\nfrom langflow.field_typing import Embeddings, Text\nfrom langflow.schema import Record\n\n\nclass AstraDBSearchComponent(LCVectorStoreComponent):\n display_name = \"Astra DB Search\"\n description = \"Searches an existing Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"input_value\", \"embedding\"]\n\n def build_config(self):\n return {\n \"search_type\": {\n \"display_name\": \"Search Type\",\n \"options\": [\"Similarity\", \"MMR\"],\n },\n \"input_value\": {\n \"display_name\": \"Input Value\",\n \"info\": \"Input value to search\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n 
\"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": 
\"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n \"number_of_results\": {\n \"display_name\": \"Number of Results\",\n \"info\": \"Number of results to return.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n collection_name: str,\n input_value: Text,\n token: str,\n api_endpoint: str,\n search_type: str = \"Similarity\",\n number_of_results: int = 4,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Sync\",\n pre_delete_collection: bool = False,\n metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> List[Record]:\n vector_store = AstraDBVectorStoreComponent().build(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n try:\n return self.search_with_vector_store(input_value, search_type, vector_store, k=number_of_results)\n except KeyError as e:\n if 
\"content\" in str(e):\n raise ValueError(\n \"You should ingest data through Langflow (or LangChain) to query it in Langflow. Your collection does not contain a field name 'content'.\"\n )\n else:\n raise e\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": 
false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "number_of_results": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 4, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "number_of_results", + "display_name": "Number of Results", + "advanced": true, + "dynamic": false, + "info": "Number of results to return.", + "load_from_db": false, + "title_case": false + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + 
"info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "search_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Similarity", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Similarity", + "MMR" + ], + "name": "search_type", + "display_name": "Search Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Sync", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Sync", + "Async", + "Off" + ], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Searches an existing Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": [ + "Record" + ], + "display_name": "Astra DB Search", + "documentation": "", + "custom_fields": { + "embedding": null, + "collection_name": null, + "input_value": null, + "token": null, + "api_endpoint": null, + "search_type": null, + "number_of_results": null, + "namespace": 
null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null + }, + "output_types": [ + "Record" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "token", + "api_endpoint", + "collection_name", + "input_value", + "embedding" + ], + "beta": false + }, + "id": "AstraDBSearch-41nRz" + }, + "selected": false, + "width": 384, + "height": 713, + "dragging": false, + "positionAbsolute": { + "x": 1723.976434815103, + "y": 277.03317407245913 + } + }, + { + "id": "AstraDB-eUCSS", + "type": "genericNode", + "position": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "data": { + "type": "AstraDB", + "node": { + "template": { + "embedding": { + "type": "Embeddings", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding", + "display_name": "Embedding", + "advanced": false, + "dynamic": false, + "info": "Embedding to use", + "load_from_db": false, + "title_case": false + }, + "inputs": { + "type": "Record", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "inputs", + "display_name": "Inputs", + "advanced": false, + "dynamic": false, + "info": "Optional list of records to be processed and stored in the vector store.", + "load_from_db": false, + "title_case": false + }, + "api_endpoint": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "api_endpoint", + "display_name": "API Endpoint", + "advanced": false, + 
"dynamic": false, + "info": "API endpoint URL for the Astra DB service.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "batch_size": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "batch_size", + "display_name": "Batch Size", + "advanced": true, + "dynamic": false, + "info": "Optional number of records to process in a single batch.", + "load_from_db": false, + "title_case": false + }, + "bulk_delete_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_delete_concurrency", + "display_name": "Bulk Delete Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk delete operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_batch_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_batch_concurrency", + "display_name": "Bulk Insert Batch Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations.", + "load_from_db": false, + "title_case": false + }, + "bulk_insert_overwrite_concurrency": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "bulk_insert_overwrite_concurrency", + "display_name": "Bulk Insert Overwrite Concurrency", + "advanced": true, + "dynamic": false, + "info": "Optional concurrency level for bulk insert operations that overwrite existing records.", + "load_from_db": false, + "title_case": false + 
}, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import List, Optional\n\nfrom langchain_astradb import AstraDBVectorStore\nfrom langchain_astradb.utils.astradb import SetupMode\n\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import Embeddings, VectorStore\nfrom langflow.schema import Record\n\n\nclass AstraDBVectorStoreComponent(CustomComponent):\n display_name = \"Astra DB\"\n description = \"Builds or loads an Astra DB Vector Store.\"\n icon = \"AstraDB\"\n field_order = [\"token\", \"api_endpoint\", \"collection_name\", \"inputs\", \"embedding\"]\n\n def build_config(self):\n return {\n \"inputs\": {\n \"display_name\": \"Inputs\",\n \"info\": \"Optional list of records to be processed and stored in the vector store.\",\n },\n \"embedding\": {\"display_name\": \"Embedding\", \"info\": \"Embedding to use\"},\n \"collection_name\": {\n \"display_name\": \"Collection Name\",\n \"info\": \"The name of the collection within Astra DB where the vectors will be stored.\",\n },\n \"token\": {\n \"display_name\": \"Token\",\n \"info\": \"Authentication token for accessing Astra DB.\",\n \"password\": True,\n },\n \"api_endpoint\": {\n \"display_name\": \"API Endpoint\",\n \"info\": \"API endpoint URL for the Astra DB service.\",\n },\n \"namespace\": {\n \"display_name\": \"Namespace\",\n \"info\": \"Optional namespace within Astra DB to use for the collection.\",\n \"advanced\": True,\n },\n \"metric\": {\n \"display_name\": \"Metric\",\n \"info\": \"Optional distance metric for vector comparisons in the vector store.\",\n \"advanced\": True,\n },\n \"batch_size\": {\n \"display_name\": \"Batch Size\",\n \"info\": \"Optional number of records to process in a single batch.\",\n \"advanced\": True,\n },\n \"bulk_insert_batch_concurrency\": {\n \"display_name\": \"Bulk Insert Batch Concurrency\",\n \"info\": \"Optional concurrency level for bulk 
insert operations.\",\n \"advanced\": True,\n },\n \"bulk_insert_overwrite_concurrency\": {\n \"display_name\": \"Bulk Insert Overwrite Concurrency\",\n \"info\": \"Optional concurrency level for bulk insert operations that overwrite existing records.\",\n \"advanced\": True,\n },\n \"bulk_delete_concurrency\": {\n \"display_name\": \"Bulk Delete Concurrency\",\n \"info\": \"Optional concurrency level for bulk delete operations.\",\n \"advanced\": True,\n },\n \"setup_mode\": {\n \"display_name\": \"Setup Mode\",\n \"info\": \"Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.\",\n \"options\": [\"Sync\", \"Async\", \"Off\"],\n \"advanced\": True,\n },\n \"pre_delete_collection\": {\n \"display_name\": \"Pre Delete Collection\",\n \"info\": \"Boolean flag to determine whether to delete the collection before creating a new one.\",\n \"advanced\": True,\n },\n \"metadata_indexing_include\": {\n \"display_name\": \"Metadata Indexing Include\",\n \"info\": \"Optional list of metadata fields to include in the indexing.\",\n \"advanced\": True,\n },\n \"metadata_indexing_exclude\": {\n \"display_name\": \"Metadata Indexing Exclude\",\n \"info\": \"Optional list of metadata fields to exclude from the indexing.\",\n \"advanced\": True,\n },\n \"collection_indexing_policy\": {\n \"display_name\": \"Collection Indexing Policy\",\n \"info\": \"Optional dictionary defining the indexing policy for the collection.\",\n \"advanced\": True,\n },\n }\n\n def build(\n self,\n embedding: Embeddings,\n token: str,\n api_endpoint: str,\n collection_name: str,\n inputs: Optional[List[Record]] = None,\n namespace: Optional[str] = None,\n metric: Optional[str] = None,\n batch_size: Optional[int] = None,\n bulk_insert_batch_concurrency: Optional[int] = None,\n bulk_insert_overwrite_concurrency: Optional[int] = None,\n bulk_delete_concurrency: Optional[int] = None,\n setup_mode: str = \"Async\",\n pre_delete_collection: bool = False,\n 
metadata_indexing_include: Optional[List[str]] = None,\n metadata_indexing_exclude: Optional[List[str]] = None,\n collection_indexing_policy: Optional[dict] = None,\n ) -> VectorStore:\n try:\n setup_mode_value = SetupMode[setup_mode.upper()]\n except KeyError:\n raise ValueError(f\"Invalid setup mode: {setup_mode}\")\n if inputs:\n documents = [_input.to_lc_document() for _input in inputs]\n\n vector_store = AstraDBVectorStore.from_documents(\n documents=documents,\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n else:\n vector_store = AstraDBVectorStore(\n embedding=embedding,\n collection_name=collection_name,\n token=token,\n api_endpoint=api_endpoint,\n namespace=namespace,\n metric=metric,\n batch_size=batch_size,\n bulk_insert_batch_concurrency=bulk_insert_batch_concurrency,\n bulk_insert_overwrite_concurrency=bulk_insert_overwrite_concurrency,\n bulk_delete_concurrency=bulk_delete_concurrency,\n setup_mode=setup_mode_value,\n pre_delete_collection=pre_delete_collection,\n metadata_indexing_include=metadata_indexing_include,\n metadata_indexing_exclude=metadata_indexing_exclude,\n collection_indexing_policy=collection_indexing_policy,\n )\n\n return vector_store\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "collection_indexing_policy": { + "type": "dict", + "required": 
false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_indexing_policy", + "display_name": "Collection Indexing Policy", + "advanced": true, + "dynamic": false, + "info": "Optional dictionary defining the indexing policy for the collection.", + "load_from_db": false, + "title_case": false + }, + "collection_name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "collection_name", + "display_name": "Collection Name", + "advanced": false, + "dynamic": false, + "info": "The name of the collection within Astra DB where the vectors will be stored.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "langflow" + }, + "metadata_indexing_exclude": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_exclude", + "display_name": "Metadata Indexing Exclude", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to exclude from the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metadata_indexing_include": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metadata_indexing_include", + "display_name": "Metadata Indexing Include", + "advanced": true, + "dynamic": false, + "info": "Optional list of metadata fields to include in the indexing.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "metric": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + 
"multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "metric", + "display_name": "Metric", + "advanced": true, + "dynamic": false, + "info": "Optional distance metric for vector comparisons in the vector store.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "namespace": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "namespace", + "display_name": "Namespace", + "advanced": true, + "dynamic": false, + "info": "Optional namespace within Astra DB to use for the collection.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "pre_delete_collection": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "pre_delete_collection", + "display_name": "Pre Delete Collection", + "advanced": true, + "dynamic": false, + "info": "Boolean flag to determine whether to delete the collection before creating a new one.", + "load_from_db": false, + "title_case": false + }, + "setup_mode": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Async", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "Sync", + "Async", + "Off" + ], + "name": "setup_mode", + "display_name": "Setup Mode", + "advanced": true, + "dynamic": false, + "info": "Configuration mode for setting up the vector store, with options like “Sync”, “Async”, or “Off”.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "token": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": 
true, + "name": "token", + "display_name": "Token", + "advanced": false, + "dynamic": false, + "info": "Authentication token for accessing Astra DB.", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "_type": "CustomComponent" + }, + "description": "Builds or loads an Astra DB Vector Store.", + "icon": "AstraDB", + "base_classes": [ + "VectorStore" + ], + "display_name": "Astra DB", + "documentation": "", + "custom_fields": { + "embedding": null, + "token": null, + "api_endpoint": null, + "collection_name": null, + "inputs": null, + "namespace": null, + "metric": null, + "batch_size": null, + "bulk_insert_batch_concurrency": null, + "bulk_insert_overwrite_concurrency": null, + "bulk_delete_concurrency": null, + "setup_mode": null, + "pre_delete_collection": null, + "metadata_indexing_include": null, + "metadata_indexing_exclude": null, + "collection_indexing_policy": null + }, + "output_types": [ + "VectorStore" + ], + "field_formatters": {}, + "frozen": false, + "field_order": [ + "token", + "api_endpoint", + "collection_name", + "inputs", + "embedding" + ], + "beta": false + }, + "id": "AstraDB-eUCSS" + }, + "selected": false, + "width": 384, + "height": 573, + "positionAbsolute": { + "x": 3372.04958055989, + "y": 1611.0742035495277 + }, + "dragging": false + }, + { + "id": "OpenAIEmbeddings-9TPjc", + "type": "genericNode", + "position": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "allowed_special", + "display_name": "Allowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "chunk_size": { + "type": "int", + 
"required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 1000, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "client": { + "type": "Any", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "client", + "display_name": "Client", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Any, Dict, List, Optional\n\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\n\nfrom langflow.field_typing import Embeddings, NestedDict\nfrom langflow.interface.custom.custom_component import CustomComponent\n\n\nclass OpenAIEmbeddingsComponent(CustomComponent):\n display_name = \"OpenAI Embeddings\"\n description = \"Generate embeddings using OpenAI models.\"\n\n def build_config(self):\n return {\n \"allowed_special\": {\n \"display_name\": \"Allowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"default_headers\": {\n \"display_name\": \"Default Headers\",\n \"advanced\": True,\n \"field_type\": \"dict\",\n },\n \"default_query\": {\n \"display_name\": \"Default Query\",\n \"advanced\": True,\n \"field_type\": \"NestedDict\",\n },\n \"disallowed_special\": {\n \"display_name\": \"Disallowed Special\",\n \"advanced\": True,\n \"field_type\": \"str\",\n \"is_list\": True,\n },\n \"chunk_size\": {\"display_name\": \"Chunk Size\", \"advanced\": True},\n \"client\": {\"display_name\": \"Client\", \"advanced\": True},\n \"deployment\": {\"display_name\": \"Deployment\", \"advanced\": True},\n 
\"embedding_ctx_length\": {\n \"display_name\": \"Embedding Context Length\",\n \"advanced\": True,\n },\n \"max_retries\": {\"display_name\": \"Max Retries\", \"advanced\": True},\n \"model\": {\n \"display_name\": \"Model\",\n \"advanced\": False,\n \"options\": [\n \"text-embedding-3-small\",\n \"text-embedding-3-large\",\n \"text-embedding-ada-002\",\n ],\n },\n \"model_kwargs\": {\"display_name\": \"Model Kwargs\", \"advanced\": True},\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"password\": True,\n \"advanced\": True,\n },\n \"openai_api_key\": {\"display_name\": \"OpenAI API Key\", \"password\": True},\n \"openai_api_type\": {\n \"display_name\": \"OpenAI API Type\",\n \"advanced\": True,\n \"password\": True,\n },\n \"openai_api_version\": {\n \"display_name\": \"OpenAI API Version\",\n \"advanced\": True,\n },\n \"openai_organization\": {\n \"display_name\": \"OpenAI Organization\",\n \"advanced\": True,\n },\n \"openai_proxy\": {\"display_name\": \"OpenAI Proxy\", \"advanced\": True},\n \"request_timeout\": {\"display_name\": \"Request Timeout\", \"advanced\": True},\n \"show_progress_bar\": {\n \"display_name\": \"Show Progress Bar\",\n \"advanced\": True,\n },\n \"skip_empty\": {\"display_name\": \"Skip Empty\", \"advanced\": True},\n \"tiktoken_model_name\": {\n \"display_name\": \"TikToken Model Name\",\n \"advanced\": True,\n },\n \"tiktoken_enable\": {\"display_name\": \"TikToken Enable\", \"advanced\": True},\n }\n\n def build(\n self,\n openai_api_key: str,\n default_headers: Optional[Dict[str, str]] = None,\n default_query: Optional[NestedDict] = {},\n allowed_special: List[str] = [],\n disallowed_special: List[str] = [\"all\"],\n chunk_size: int = 1000,\n client: Optional[Any] = None,\n deployment: str = \"text-embedding-ada-002\",\n embedding_ctx_length: int = 8191,\n max_retries: int = 6,\n model: str = \"text-embedding-ada-002\",\n model_kwargs: NestedDict = {},\n openai_api_base: Optional[str] = None,\n 
openai_api_type: Optional[str] = None,\n openai_api_version: Optional[str] = None,\n openai_organization: Optional[str] = None,\n openai_proxy: Optional[str] = None,\n request_timeout: Optional[float] = None,\n show_progress_bar: bool = False,\n skip_empty: bool = False,\n tiktoken_enable: bool = True,\n tiktoken_model_name: Optional[str] = None,\n ) -> Embeddings:\n # This is to avoid errors with Vector Stores (e.g Chroma)\n if disallowed_special == [\"all\"]:\n disallowed_special = \"all\" # type: ignore\n\n return OpenAIEmbeddings(\n tiktoken_enabled=tiktoken_enable,\n default_headers=default_headers,\n default_query=default_query,\n allowed_special=set(allowed_special),\n disallowed_special=\"all\",\n chunk_size=chunk_size,\n client=client,\n deployment=deployment,\n embedding_ctx_length=embedding_ctx_length,\n max_retries=max_retries,\n model=model,\n model_kwargs=model_kwargs,\n base_url=openai_api_base,\n api_key=openai_api_key,\n openai_api_type=openai_api_type,\n api_version=openai_api_version,\n organization=openai_organization,\n openai_proxy=openai_proxy,\n timeout=request_timeout,\n show_progress_bar=show_progress_bar,\n skip_empty=skip_empty,\n tiktoken_model_name=tiktoken_model_name,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_headers": { + "type": "dict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "default_headers", + "display_name": "Default Headers", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "default_query": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + 
"name": "default_query", + "display_name": "Default Query", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "deployment": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "deployment", + "display_name": "Deployment", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "disallowed_special": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": [ + "all" + ], + "fileTypes": [], + "file_path": "", + "password": false, + "name": "disallowed_special", + "display_name": "Disallowed Special", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "embedding_ctx_length": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 8191, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "embedding_ctx_length", + "display_name": "Embedding Context Length", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "max_retries": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 6, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_retries", + "display_name": "Max Retries", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "model": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "fileTypes": [], + "file_path": "", 
+ "password": false, + "options": [ + "text-embedding-3-small", + "text-embedding-3-large", + "text-embedding-ada-002" + ], + "name": "model", + "display_name": "Model", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_key": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ], + "value": "" + }, + "openai_api_type": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_api_version": { + "type": "str", + "required": false, + "placeholder": "", + "list": 
false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_organization": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "openai_proxy": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "request_timeout": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "request_timeout", + "display_name": "Request Timeout", + "advanced": true, + "dynamic": false, + "info": "", + "rangeSpec": { + "step_type": "float", + "min": -1, + "max": 1, + "step": 0.1 + }, + "load_from_db": false, + "title_case": false + }, + "show_progress_bar": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "show_progress_bar", + "display_name": "Show Progress Bar", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "skip_empty": { + "type": 
"bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "skip_empty", + "display_name": "Skip Empty", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_enable": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_enable", + "display_name": "TikToken Enable", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "tiktoken_model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tiktoken_model_name", + "display_name": "TikToken Model Name", + "advanced": true, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": [ + "Text" + ] + }, + "_type": "CustomComponent" + }, + "description": "Generate embeddings using OpenAI models.", + "base_classes": [ + "Embeddings" + ], + "display_name": "OpenAI Embeddings", + "documentation": "", + "custom_fields": { + "openai_api_key": null, + "default_headers": null, + "default_query": null, + "allowed_special": null, + "disallowed_special": null, + "chunk_size": null, + "client": null, + "deployment": null, + "embedding_ctx_length": null, + "max_retries": null, + "model": null, + "model_kwargs": null, + "openai_api_base": null, + "openai_api_type": null, + "openai_api_version": null, + "openai_organization": null, + "openai_proxy": null, + "request_timeout": null, + "show_progress_bar": null, + "skip_empty": null, + "tiktoken_enable": null, + "tiktoken_model_name": null + }, + "output_types": [ + "Embeddings" + ], + "field_formatters": {}, 
+ "frozen": false, + "field_order": [], + "beta": false + }, + "id": "OpenAIEmbeddings-9TPjc" + }, + "selected": false, + "width": 384, + "height": 383, + "positionAbsolute": { + "x": 2814.0402191223047, + "y": 1955.9268168273086 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "TextOutput-BDknO", + "target": "Prompt-xeI6K", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}", + "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-TextOutput-BDknO{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œTextOutputœ,œidœ:œTextOutput-BDknOœ}-Prompt-xeI6K{œfieldNameœ:œcontextœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "context", + "id": "Prompt-xeI6K", + "inputTypes": [ + "Document", + "BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "TextOutput", + "id": "TextOutput-BDknO" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "ChatInput-yxMKE", + "target": "Prompt-xeI6K", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}", + "targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-Prompt-xeI6K{œfieldNameœ:œquestionœ,œidœ:œPrompt-xeI6Kœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "question", + "id": "Prompt-xeI6K", + "inputTypes": [ + "Document", + 
"BaseOutputParser", + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Text", + "str", + "object", + "Record" + ], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "Prompt-xeI6K", + "target": "OpenAIModel-EjXlN", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-Prompt-xeI6K{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œPromptœ,œidœ:œPrompt-xeI6Kœ}-OpenAIModel-EjXlN{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-EjXlNœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "OpenAIModel-EjXlN", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "Prompt", + "id": "Prompt-xeI6K" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIModel-EjXlN", + "target": "ChatOutput-Q39I8", + "sourceHandle": "{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "id": "reactflow__edge-OpenAIModel-EjXlN{œbaseClassesœ:[œobjectœ,œTextœ,œstrœ],œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-EjXlNœ}-ChatOutput-Q39I8{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Q39I8œ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatOutput-Q39I8", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "object", + "Text", + "str" + ], + "dataType": "OpenAIModel", + "id": 
"OpenAIModel-EjXlN" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "File-t0a6a", + "target": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}", + "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}", + "id": "reactflow__edge-File-t0a6a{œbaseClassesœ:[œRecordœ],œdataTypeœ:œFileœ,œidœ:œFile-t0a6aœ}-RecursiveCharacterTextSplitter-tR9QM{œfieldNameœ:œinputsœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ,œinputTypesœ:[œDocumentœ,œRecordœ],œtypeœ:œDocumentœ}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "RecursiveCharacterTextSplitter-tR9QM", + "inputTypes": [ + "Document", + "Record" + ], + "type": "Document" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "File", + "id": "File-t0a6a" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "selected": false + }, + { + "source": "OpenAIEmbeddings-ZlOk1", + "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDBSearch-41nRz", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": [ + "Embeddings" + ], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-ZlOk1" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-OpenAIEmbeddings-ZlOk1{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-ZlOk1œ}-AstraDBSearch-41nRz{œfieldNameœ:œembeddingœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}" + }, + { + "source": "ChatInput-yxMKE", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}", + "target": "AstraDBSearch-41nRz", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "AstraDBSearch-41nRz", + "inputTypes": [ + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Text", + "str", + "object", + "Record" + ], + "dataType": "ChatInput", + "id": "ChatInput-yxMKE" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatInput-yxMKE{œbaseClassesœ:[œTextœ,œstrœ,œobjectœ,œRecordœ],œdataTypeœ:œChatInputœ,œidœ:œChatInput-yxMKEœ}-AstraDBSearch-41nRz{œfieldNameœ:œinput_valueœ,œidœ:œAstraDBSearch-41nRzœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + }, + { + "source": "RecursiveCharacterTextSplitter-tR9QM", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}", + "target": "AstraDB-eUCSS", + "targetHandle": "{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}", + "data": { + "targetHandle": { + "fieldName": "inputs", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Record" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "RecursiveCharacterTextSplitter", + "id": "RecursiveCharacterTextSplitter-tR9QM" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-RecursiveCharacterTextSplitter-tR9QM{œbaseClassesœ:[œRecordœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-tR9QMœ}-AstraDB-eUCSS{œfieldNameœ:œinputsœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œRecordœ}", + "selected": false + }, + { + "source": "OpenAIEmbeddings-9TPjc", + "sourceHandle": "{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}", + "target": "AstraDB-eUCSS", + "targetHandle": "{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "data": { + "targetHandle": { + "fieldName": "embedding", + "id": "AstraDB-eUCSS", + "inputTypes": null, + "type": "Embeddings" + }, + "sourceHandle": { + "baseClasses": [ + "Embeddings" + ], + "dataType": "OpenAIEmbeddings", + "id": "OpenAIEmbeddings-9TPjc" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-OpenAIEmbeddings-9TPjc{œbaseClassesœ:[œEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-9TPjcœ}-AstraDB-eUCSS{œfieldNameœ:œembeddingœ,œidœ:œAstraDB-eUCSSœ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}", + "selected": false + }, + { + "source": "AstraDBSearch-41nRz", + "sourceHandle": "{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}", + "target": "TextOutput-BDknO", + "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "TextOutput-BDknO", + "inputTypes": [ + "Record", + "Text" + ], + "type": "str" + }, + "sourceHandle": { + "baseClasses": [ + "Record" + ], + "dataType": "AstraDBSearch", + "id": "AstraDBSearch-41nRz" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": 
"reactflow__edge-AstraDBSearch-41nRz{œbaseClassesœ:[œRecordœ],œdataTypeœ:œAstraDBSearchœ,œidœ:œAstraDBSearch-41nRzœ}-TextOutput-BDknO{œfieldNameœ:œinput_valueœ,œidœ:œTextOutput-BDknOœ,œinputTypesœ:[œRecordœ,œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": -259.6782520315529, + "y": 90.3428735006047, + "zoom": 0.2687057134854984 + } + }, + "description": "Visit https://pre-release.langflow.org/guides/rag-with-astradb for a detailed guide of this project.\nThis project give you both Ingestion and RAG in a single file. You'll need to visit https://astra.datastax.com/ to create an Astra DB instance, your Token and grab an API Endpoint.\nRunning this project requires you to add a file in the Files component, then define a Collection Name and click on the Play icon on the Astra DB component. \n\nAfter the ingestion ends you are ready to click on the Run button at the lower left corner and start asking questions about your data.", + "name": "Vector Store RAG", + "last_tested_version": "1.0.0a0", + "is_component": false +} \ No newline at end of file diff --git a/src/backend/langflow/services/database/__init__.py b/src/backend/base/langflow/interface/__init__.py similarity index 100% rename from src/backend/langflow/services/database/__init__.py rename to src/backend/base/langflow/interface/__init__.py diff --git a/src/backend/langflow/interface/agents/__init__.py b/src/backend/base/langflow/interface/agents/__init__.py similarity index 100% rename from src/backend/langflow/interface/agents/__init__.py rename to src/backend/base/langflow/interface/agents/__init__.py diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/base/langflow/interface/agents/base.py similarity index 97% rename from src/backend/langflow/interface/agents/base.py rename to src/backend/base/langflow/interface/agents/base.py index 9c0210b27..5d50b3aff 100644 --- a/src/backend/langflow/interface/agents/base.py +++ b/src/backend/base/langflow/interface/agents/base.py @@ -1,14 
+1,13 @@ from typing import ClassVar, Dict, List, Optional from langchain.agents import types +from loguru import logger -from langflow.custom.customs import get_custom_nodes from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.base import LangChainTypeCreator +from langflow.legacy_custom.customs import get_custom_nodes from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.agents import AgentFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/base/langflow/interface/agents/custom.py similarity index 98% rename from src/backend/langflow/interface/agents/custom.py rename to src/backend/base/langflow/interface/agents/custom.py index 608a98a9b..142b77139 100644 --- a/src/backend/langflow/interface/agents/custom.py +++ b/src/backend/base/langflow/interface/agents/custom.py @@ -1,11 +1,7 @@ from typing import Any, Optional from langchain.agents import AgentExecutor, ZeroShotAgent -from langchain.agents.agent_toolkits import ( - VectorStoreInfo, - VectorStoreRouterToolkit, - VectorStoreToolkit, -) +from langchain.agents.agent_toolkits import VectorStoreInfo, VectorStoreRouterToolkit, VectorStoreToolkit from langchain.agents.agent_toolkits.vectorstore.prompt import PREFIX as VECTORSTORE_PREFIX from langchain.agents.agent_toolkits.vectorstore.prompt import ROUTER_PREFIX as VECTORSTORE_ROUTER_PREFIX from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS diff --git a/src/backend/langflow/interface/agents/prebuilt.py b/src/backend/base/langflow/interface/agents/prebuilt.py similarity index 100% rename from src/backend/langflow/interface/agents/prebuilt.py rename to src/backend/base/langflow/interface/agents/prebuilt.py diff --git a/src/backend/langflow/interface/base.py b/src/backend/base/langflow/interface/base.py similarity index 100% rename from 
src/backend/langflow/interface/base.py rename to src/backend/base/langflow/interface/base.py index d535838ac..a300f12f2 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/base/langflow/interface/base.py @@ -1,15 +1,15 @@ from abc import ABC, abstractmethod from typing import Any, Dict, List, Optional, Type, Union -from langchain.chains.base import Chain + from langchain.agents import AgentExecutor -from langflow.services.deps import get_settings_service +from langchain.chains.base import Chain +from loguru import logger from pydantic import BaseModel +from langflow.services.deps import get_settings_service from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -from loguru import logger - # Assuming necessary imports for Field, Template, and FrontendNode classes diff --git a/src/backend/langflow/interface/chains/__init__.py b/src/backend/base/langflow/interface/chains/__init__.py similarity index 100% rename from src/backend/langflow/interface/chains/__init__.py rename to src/backend/base/langflow/interface/chains/__init__.py diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/base/langflow/interface/chains/base.py similarity index 98% rename from src/backend/langflow/interface/chains/base.py rename to src/backend/base/langflow/interface/chains/base.py index 8017902bd..fb008f0a7 100644 --- a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/base/langflow/interface/chains/base.py @@ -1,15 +1,15 @@ from typing import Any, ClassVar, Dict, List, Optional, Type -from langflow.custom.customs import get_custom_nodes -from langflow.interface.base import LangChainTypeCreator -from langflow.interface.importing.utils import import_class -from langflow.services.deps import get_settings_service - -from langflow.template.frontend_node.chains import ChainFrontendNode -from loguru import logger -from 
langflow.utils.util import build_template_from_class, build_template_from_method from langchain import chains from langchain_experimental.sql import SQLDatabaseChain +from loguru import logger + +from langflow.interface.base import LangChainTypeCreator +from langflow.interface.importing.utils import import_class +from langflow.legacy_custom.customs import get_custom_nodes +from langflow.services.deps import get_settings_service +from langflow.template.frontend_node.chains import ChainFrontendNode +from langflow.utils.util import build_template_from_class, build_template_from_method # Assuming necessary imports for Field, Template, and FrontendNode classes diff --git a/src/backend/langflow/interface/chains/custom.py b/src/backend/base/langflow/interface/chains/custom.py similarity index 99% rename from src/backend/langflow/interface/chains/custom.py rename to src/backend/base/langflow/interface/chains/custom.py index 27ed646b2..2a72f3471 100644 --- a/src/backend/langflow/interface/chains/custom.py +++ b/src/backend/base/langflow/interface/chains/custom.py @@ -1,13 +1,14 @@ from typing import Dict, Optional, Type, Union +from langchain.base_language import BaseLanguageModel from langchain.chains import ConversationChain +from langchain.chains.question_answering import load_qa_chain from langchain.memory.buffer import ConversationBufferMemory from langchain.schema import BaseMemory -from langflow.interface.base import CustomChain from pydantic.v1 import Field, root_validator -from langchain.chains.question_answering import load_qa_chain + +from langflow.interface.base import CustomChain from langflow.interface.utils import extract_input_variables_from_prompt -from langchain.base_language import BaseLanguageModel DEFAULT_SUFFIX = """" Current conversation: diff --git a/src/backend/langflow/interface/custom/__init__.py b/src/backend/base/langflow/interface/custom/__init__.py similarity index 100% rename from src/backend/langflow/interface/custom/__init__.py rename to 
src/backend/base/langflow/interface/custom/__init__.py diff --git a/src/backend/base/langflow/interface/custom/attributes.py b/src/backend/base/langflow/interface/custom/attributes.py new file mode 100644 index 000000000..7bcfb5f4b --- /dev/null +++ b/src/backend/base/langflow/interface/custom/attributes.py @@ -0,0 +1,43 @@ +import warnings +from typing import Callable + +import emoji + + +def validate_icon(value: str, *args, **kwargs): + # we are going to use the emoji library to validate the emoji + # emojis can be defined using the :emoji_name: syntax + + if not value.startswith(":") and not value.endswith(":"): + return value + elif not value.startswith(":") or not value.endswith(":"): + # emoji should have both starting and ending colons + # so if one of them is missing, we will raise + raise ValueError(f"Invalid emoji. {value} is not a valid emoji.") + + emoji_value = emoji.emojize(value, variant="emoji_type") + if value == emoji_value: + warnings.warn(f"Invalid emoji. {value} is not a valid emoji.") + return value + return emoji_value + + +def getattr_return_str(value): + return str(value) if value else "" + + +def getattr_return_bool(value): + if isinstance(value, bool): + return value + + +ATTR_FUNC_MAPPING: dict[str, Callable] = { + "display_name": getattr_return_str, + "description": getattr_return_str, + "beta": getattr_return_bool, + "documentation": getattr_return_str, + "icon": validate_icon, + "frozen": getattr_return_bool, + "is_input": getattr_return_bool, + "is_output": getattr_return_bool, +} diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/base/langflow/interface/custom/base.py similarity index 85% rename from src/backend/langflow/interface/custom/base.py rename to src/backend/base/langflow/interface/custom/base.py index 45a3ea215..573eacba1 100644 --- a/src/backend/langflow/interface/custom/base.py +++ b/src/backend/base/langflow/interface/custom/base.py @@ -1,16 +1,12 @@ from typing import Any, Dict, List, Optional, 
Type +from loguru import logger from langflow.interface.base import LangChainTypeCreator # from langflow.interface.custom.custom import CustomComponent from langflow.interface.custom.custom_component import CustomComponent -from langflow.template.frontend_node.custom_components import ( - CustomComponentFrontendNode, -) -from loguru import logger - -# Assuming necessary imports for Field, Template, and FrontendNode classes +from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode class CustomComponentCreator(LangChainTypeCreator): @@ -29,7 +25,7 @@ class CustomComponentCreator(LangChainTypeCreator): return self.type_dict def get_signature(self, name: str) -> Optional[Dict]: - from langflow.custom.customs import get_custom_nodes + from langflow.legacy_custom.customs import get_custom_nodes try: if name in get_custom_nodes(self.type_name).keys(): diff --git a/src/backend/langflow/interface/custom/code_parser/__init__.py b/src/backend/base/langflow/interface/custom/code_parser/__init__.py similarity index 100% rename from src/backend/langflow/interface/custom/code_parser/__init__.py rename to src/backend/base/langflow/interface/custom/code_parser/__init__.py diff --git a/src/backend/langflow/interface/custom/code_parser/code_parser.py b/src/backend/base/langflow/interface/custom/code_parser/code_parser.py similarity index 90% rename from src/backend/langflow/interface/custom/code_parser/code_parser.py rename to src/backend/base/langflow/interface/custom/code_parser/code_parser.py index 1ab1021b7..44dbbc1d9 100644 --- a/src/backend/langflow/interface/custom/code_parser/code_parser.py +++ b/src/backend/base/langflow/interface/custom/code_parser/code_parser.py @@ -6,9 +6,10 @@ from typing import Any, Dict, List, Type, Union from cachetools import TTLCache, cachedmethod, keys from fastapi import HTTPException +from loguru import logger - -from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails +from 
langflow.interface.custom.eval import eval_custom_component_code +from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails, MissingDefault class CodeSyntaxError(HTTPException): @@ -173,7 +174,7 @@ class CodeParser: args += self.parse_keyword_args(node) # Commented out because we don't want kwargs # showing up as fields in the frontend - # args += self.parse_kwargs(node) + args += self.parse_kwargs(node) return args @@ -184,7 +185,7 @@ class CodeParser: num_args = len(node.args.args) num_defaults = len(node.args.defaults) num_missing_defaults = num_args - num_defaults - missing_defaults = [None] * num_missing_defaults + missing_defaults = [MissingDefault()] * num_missing_defaults default_values = [ast.unparse(default).strip("'") if default else None for default in node.args.defaults] # Now check all default values to see if there # are any "None" values in the middle @@ -288,15 +289,28 @@ class CodeParser: method = self.parse_callable_details(stmt) return (method, True) if stmt.name == "__init__" else (method, False) + def get_base_classes(self): + """ + Returns the base classes of the custom component class. + """ + try: + bases = self.execute_and_inspect_classes(self.code) + except Exception as e: + # If the code cannot be executed, return an empty list + logger.exception(e) + bases = [] + raise e + return bases + def parse_classes(self, node: ast.ClassDef) -> None: """ Extracts "classes" from the code, including inheritance and init methods. 
""" - + bases = self.get_base_classes() or [ast.unparse(b) for b in node.bases] class_details = ClassCodeDetails( name=node.name, doc=ast.get_docstring(node), - bases=[ast.unparse(base) for base in node.bases], + bases=bases, attributes=[], methods=[], init=None, @@ -328,6 +342,18 @@ class CodeParser: } self.data["global_vars"].append(global_var) + def execute_and_inspect_classes(self, code: str): + custom_component_class = eval_custom_component_code(code) + custom_component = custom_component_class() + dunder_class = custom_component.__class__ + # Get the base classes at two levels of inheritance + bases = [] + for base in dunder_class.__bases__: + bases.append(base.__name__) + for bases_base in base.__bases__: + bases.append(bases_base.__name__) + return bases + def parse_code(self) -> Dict[str, Any]: """ Runs all parsing operations and returns the resulting data. diff --git a/src/backend/langflow/interface/custom/code_parser/utils.py b/src/backend/base/langflow/interface/custom/code_parser/utils.py similarity index 77% rename from src/backend/langflow/interface/custom/code_parser/utils.py rename to src/backend/base/langflow/interface/custom/code_parser/utils.py index d9b9def26..0f97b4c7b 100644 --- a/src/backend/langflow/interface/custom/code_parser/utils.py +++ b/src/backend/base/langflow/interface/custom/code_parser/utils.py @@ -14,11 +14,10 @@ def extract_inner_type(return_type: str) -> str: def extract_inner_type_from_generic_alias(return_type: GenericAlias) -> Any: """ - Extracts the inner type from a type hint that is a list. + Extracts the inner type from a type hint that is a list or a Optional. """ if return_type.__origin__ == list: return list(return_type.__args__) - return return_type @@ -36,4 +35,12 @@ def extract_union_types_from_generic_alias(return_type: GenericAlias) -> list: """ Extracts the inner type from a type hint that is a Union. 
""" + if isinstance(return_type, list): + return [ + _inner_arg + for _type in return_type + for _inner_arg in _type.__args__ + if _inner_arg not in set((Any, type(None), type(Any))) + ] + return list(return_type.__args__) diff --git a/src/backend/langflow/interface/custom/custom_component/__init__.py b/src/backend/base/langflow/interface/custom/custom_component/__init__.py similarity index 100% rename from src/backend/langflow/interface/custom/custom_component/__init__.py rename to src/backend/base/langflow/interface/custom/custom_component/__init__.py diff --git a/src/backend/langflow/interface/custom/custom_component/component.py b/src/backend/base/langflow/interface/custom/custom_component/component.py similarity index 56% rename from src/backend/langflow/interface/custom/custom_component/component.py rename to src/backend/base/langflow/interface/custom/custom_component/component.py index 9053dcb0f..470ebcde8 100644 --- a/src/backend/langflow/interface/custom/custom_component/component.py +++ b/src/backend/base/langflow/interface/custom/custom_component/component.py @@ -1,13 +1,13 @@ -import ast import operator import warnings from typing import Any, ClassVar, Optional -import emoji from cachetools import TTLCache, cachedmethod from fastapi import HTTPException +from langflow.interface.custom.attributes import ATTR_FUNC_MAPPING from langflow.interface.custom.code_parser import CodeParser +from langflow.interface.custom.eval import eval_custom_component_code from langflow.utils import validate @@ -36,10 +36,6 @@ class Component: else: setattr(self, key, value) - # Validate the emoji at the icon field - if hasattr(self, "icon") and self.icon: - self.icon = self.validate_icon(self.icon) - def __setattr__(self, key, value): if key == "_user_id" and hasattr(self, "_user_id"): warnings.warn("user_id is immutable and cannot be changed.") @@ -68,42 +64,27 @@ class Component: return validate.create_function(self.code, self._function_entrypoint_name) - def 
build_template_config(self, attributes) -> dict: + def build_template_config(self) -> dict: + """ + Builds the template configuration for the custom component. + + Returns: + A dictionary representing the template configuration. + """ + if not self.code: + return {} + + cc_class = eval_custom_component_code(self.code) + component_instance = cc_class() template_config = {} - for item in attributes: - item_name = item.get("name") - - if item_value := item.get("value"): - if "display_name" in item_name: - template_config["display_name"] = ast.literal_eval(item_value) - - elif "description" in item_name: - template_config["description"] = ast.literal_eval(item_value) - - elif "beta" in item_name: - template_config["beta"] = ast.literal_eval(item_value) - - elif "documentation" in item_name: - template_config["documentation"] = ast.literal_eval(item_value) - - elif "icon" in item_name: - icon_str = ast.literal_eval(item_value) - template_config["icon"] = self.validate_icon(icon_str) + for attribute, func in ATTR_FUNC_MAPPING.items(): + if hasattr(component_instance, attribute): + value = getattr(component_instance, attribute) + if value is not None: + template_config[attribute] = func(value=value) return template_config - def validate_icon(self, value: str): - # we are going to use the emoji library to validate the emoji - # emojis can be defined using the :emoji_name: syntax - if not value.startswith(":") or not value.endswith(":"): - warnings.warn("Invalid emoji. Please use the :emoji_name: syntax.") - return value - emoji_value = emoji.emojize(value, variant="emoji_type") - if value == emoji_value: - warnings.warn(f"Invalid emoji. 
{value} is not a valid emoji.") - return value - return emoji_value - def build(self, *args: Any, **kwargs: Any) -> Any: raise NotImplementedError diff --git a/src/backend/base/langflow/interface/custom/custom_component/custom_component.py b/src/backend/base/langflow/interface/custom/custom_component/custom_component.py new file mode 100644 index 000000000..e6f0c4652 --- /dev/null +++ b/src/backend/base/langflow/interface/custom/custom_component/custom_component.py @@ -0,0 +1,459 @@ +import operator +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, ClassVar, List, Optional, Sequence, Union +from uuid import UUID + +import yaml +from cachetools import TTLCache, cachedmethod +from langchain_core.documents import Document +from pydantic import BaseModel + +from langflow.helpers.flow import list_flows, load_flow, run_flow +from langflow.interface.custom.code_parser.utils import ( + extract_inner_type_from_generic_alias, + extract_union_types_from_generic_alias, +) +from langflow.interface.custom.custom_component.component import Component +from langflow.schema import Record +from langflow.schema.dotdict import dotdict +from langflow.services.deps import get_storage_service, get_variable_service, session_scope +from langflow.services.storage.service import StorageService +from langflow.utils import validate + +if TYPE_CHECKING: + from langflow.graph.graph.base import Graph + from langflow.graph.vertex.base import Vertex + from langflow.services.storage.service import StorageService + + +class CustomComponent(Component): + """ + Represents a custom component in Langflow. + + Attributes: + display_name (Optional[str]): The display name of the custom component. + description (Optional[str]): The description of the custom component. + code (Optional[str]): The code of the custom component. + field_config (dict): The field configuration of the custom component. 
+ code_class_base_inheritance (ClassVar[str]): The base class name for the custom component. + function_entrypoint_name (ClassVar[str]): The name of the function entrypoint for the custom component. + function (Optional[Callable]): The function associated with the custom component. + repr_value (Optional[Any]): The representation value of the custom component. + user_id (Optional[Union[UUID, str]]): The user ID associated with the custom component. + status (Optional[Any]): The status of the custom component. + _tree (Optional[dict]): The code tree of the custom component. + """ + + display_name: Optional[str] = None + """The display name of the component. Defaults to None.""" + description: Optional[str] = None + """The description of the component. Defaults to None.""" + icon: Optional[str] = None + """The icon of the component. It should be an emoji. Defaults to None.""" + is_input: Optional[bool] = None + """The input state of the component. Defaults to None. + If True, the component must have a field named 'input_value'.""" + is_output: Optional[bool] = None + """The output state of the component. Defaults to None. + If True, the component must have a field named 'input_value'.""" + code: Optional[str] = None + """The code of the component. Defaults to None.""" + field_config: dict = {} + """The field configuration of the component. Defaults to an empty dictionary.""" + field_order: Optional[List[str]] = None + """The field order of the component. Defaults to an empty list.""" + frozen: Optional[bool] = False + """The default frozen state of the component. Defaults to False.""" + build_parameters: Optional[dict] = None + """The build parameters of the component. Defaults to None.""" + selected_output_type: Optional[str] = None + """The selected output type of the component. Defaults to None.""" + vertex: Optional["Vertex"] = None + """The edge target parameter of the component. 
Defaults to None.""" + code_class_base_inheritance: ClassVar[str] = "CustomComponent" + function_entrypoint_name: ClassVar[str] = "build" + function: Optional[Callable] = None + repr_value: Optional[Any] = "" + user_id: Optional[Union[UUID, str]] = None + status: Optional[Any] = None + """The status of the component. This is displayed on the frontend. Defaults to None.""" + _flows_records: Optional[List[Record]] = None + + def update_state(self, name: str, value: Any): + if not self.vertex: + raise ValueError("Vertex is not set") + try: + self.vertex.graph.update_state(name=name, record=value, caller=self.vertex.id) + except Exception as e: + raise ValueError(f"Error updating state: {e}") + + def append_state(self, name: str, value: Any): + if not self.vertex: + raise ValueError("Vertex is not set") + try: + self.vertex.graph.append_state(name=name, record=value, caller=self.vertex.id) + except Exception as e: + raise ValueError(f"Error appending state: {e}") + + def get_state(self, name: str): + if not self.vertex: + raise ValueError("Vertex is not set") + try: + return self.vertex.graph.get_state(name=name) + except Exception as e: + raise ValueError(f"Error getting state: {e}") + + _tree: Optional[dict] = None + + def __init__(self, **data): + """ + Initializes a new instance of the CustomComponent class. + + Args: + **data: Additional keyword arguments to initialize the custom component. 
+ """ + self.cache = TTLCache(maxsize=1024, ttl=60) + super().__init__(**data) + + @staticmethod + def resolve_path(path: str) -> str: + """Resolves the path to an absolute path.""" + path_object = Path(path) + if path_object.parts[0] == "~": + path_object = path_object.expanduser() + elif path_object.is_relative_to("."): + path_object = path_object.resolve() + return str(path_object) + + def get_full_path(self, path: str) -> str: + storage_svc: "StorageService" = get_storage_service() + + flow_id, file_name = path.split("/", 1) + return storage_svc.build_full_path(flow_id, file_name) + + @property + def graph(self): + return self.vertex.graph + + def _get_field_order(self): + return self.field_order or list(self.field_config.keys()) + + def custom_repr(self): + """ + Returns the custom representation of the custom component. + + Returns: + str: The custom representation of the custom component. + """ + if self.repr_value == "": + self.repr_value = self.status + if isinstance(self.repr_value, dict): + return yaml.dump(self.repr_value) + if isinstance(self.repr_value, str): + return self.repr_value + if isinstance(self.repr_value, BaseModel) and not isinstance(self.repr_value, Record): + return str(self.repr_value) + return self.repr_value + + def build_config(self): + """ + Builds the configuration for the custom component. + + Returns: + dict: The configuration for the custom component. + """ + return self.field_config + + def update_build_config( + self, + build_config: dotdict, + field_value: Any, + field_name: Optional[str] = None, + ): + build_config[field_name] = field_value + return build_config + + @property + def tree(self): + """ + Gets the code tree of the custom component. + + Returns: + dict: The code tree of the custom component. + """ + return self.get_code_tree(self.code or "") + + def to_records(self, data: Any, keys: Optional[List[str]] = None, silent_errors: bool = False) -> List[Record]: + """ + Converts input data into a list of Record objects. 
+ + Args: + data (Any): The input data to be converted. It can be a single item or a sequence of items. + If the input data is a Langchain Document, text_key and data_key are ignored. + + keys (List[str], optional): The keys to access the text and data values in each item. + It should be a list of strings where the first element is the text key and the second element is the data key. + Defaults to None, in which case the default keys "text" and "data" are used. + + Returns: + List[Record]: A list of Record objects. + + Raises: + ValueError: If the input data is not of a valid type or if the specified keys are not found in the data. + + """ + if not keys: + keys = [] + records = [] + if not isinstance(data, Sequence): + data = [data] + for item in data: + data_dict = {} + if isinstance(item, Document): + data_dict = item.metadata + data_dict["text"] = item.page_content + elif isinstance(item, BaseModel): + model_dump = item.model_dump() + for key in keys: + if silent_errors: + data_dict[key] = model_dump.get(key, "") + else: + try: + data_dict[key] = model_dump[key] + except KeyError: + raise ValueError(f"Key {key} not found in {item}") + + elif isinstance(item, str): + data_dict = {"text": item} + elif isinstance(item, dict): + data_dict = item.copy() + else: + raise ValueError(f"Invalid data type: {type(item)}") + + records.append(Record(data=data_dict)) + + return records + + def create_references_from_records(self, records: List[Record], include_data: bool = False) -> str: + """ + Create references from a list of records. + + Args: + records (List[dict]): A list of records, where each record is a dictionary. + include_data (bool, optional): Whether to include data in the references. Defaults to False. + + Returns: + str: A string containing the references in markdown format. 
+ """ + if not records: + return "" + markdown_string = "---\n" + for record in records: + markdown_string += f"- Text: {record.get_text()}" + if include_data: + markdown_string += f" Data: {record.data}" + markdown_string += "\n" + return markdown_string + + @property + def get_function_entrypoint_args(self) -> list: + """ + Gets the arguments of the function entrypoint for the custom component. + + Returns: + list: The arguments of the function entrypoint. + """ + build_method = self.get_build_method() + if not build_method: + return [] + + args = build_method["args"] + for arg in args: + if not arg.get("type") and arg.get("name") != "self": + # Set the type to Data + arg["type"] = "Data" + return args + + @cachedmethod(operator.attrgetter("cache")) + def get_build_method(self): + """ + Gets the build method for the custom component. + + Returns: + dict: The build method for the custom component. + """ + if not self.code: + return {} + + component_classes = [cls for cls in self.tree["classes"] if self.code_class_base_inheritance in cls["bases"]] + if not component_classes: + return {} + + # Assume the first Component class is the one we're interested in + component_class = component_classes[0] + build_methods = [ + method for method in component_class["methods"] if method["name"] == self.function_entrypoint_name + ] + + return build_methods[0] if build_methods else {} + + @property + def get_function_entrypoint_return_type(self) -> List[Any]: + """ + Gets the return type of the function entrypoint for the custom component. + + Returns: + List[Any]: The return type of the function entrypoint. 
+ """ + build_method = self.get_build_method() + if not build_method or not build_method.get("has_return"): + return [] + return_type = build_method["return_type"] + + # If list or List is in the return type, then we remove it and return the inner type + if hasattr(return_type, "__origin__") and return_type.__origin__ in [ + list, + List, + ]: + return_type = extract_inner_type_from_generic_alias(return_type) + + # If the return type is not a Union, then we just return it as a list + inner_type = return_type[0] if isinstance(return_type, list) else return_type + if not hasattr(inner_type, "__origin__") or inner_type.__origin__ != Union: + return return_type if isinstance(return_type, list) else [return_type] + # If the return type is a Union, then we need to parse it + return_type = extract_union_types_from_generic_alias(return_type) + return return_type + + @property + def get_main_class_name(self): + """ + Gets the main class name of the custom component. + + Returns: + str: The main class name of the custom component. + """ + if not self.code: + return "" + + base_name = self.code_class_base_inheritance + method_name = self.function_entrypoint_name + + classes = [] + for item in self.tree.get("classes", []): + if base_name in item["bases"]: + method_names = [method["name"] for method in item["methods"]] + if method_name in method_names: + classes.append(item["name"]) + + # Get just the first item + return next(iter(classes), "") + + @property + def template_config(self): + """ + Gets the template configuration for the custom component. + + Returns: + dict: The template configuration for the custom component. + """ + return self.build_template_config() + + @property + def variables(self): + """ + Returns the variable for the current user with the specified name. + + Raises: + ValueError: If the user id is not set. + + Returns: + The variable for the current user with the specified name. 
+ """ + + def get_variable(name: str): + if hasattr(self, "_user_id") and not self._user_id: + raise ValueError(f"User id is not set for {self.__class__.__name__}") + variable_service = get_variable_service() # Get service instance + # Retrieve and decrypt the variable by name for the current user + with session_scope() as session: + return variable_service.get_variable(user_id=self._user_id or "", name=name, session=session) + + return get_variable + + def list_key_names(self): + """ + Lists the names of the variables for the current user. + + Raises: + ValueError: If the user id is not set. + + Returns: + List[str]: The names of the variables for the current user. + """ + if hasattr(self, "_user_id") and not self._user_id: + raise ValueError(f"User id is not set for {self.__class__.__name__}") + variable_service = get_variable_service() + + with session_scope() as session: + return variable_service.list_variables(user_id=self._user_id, session=session) + + def index(self, value: int = 0): + """ + Returns a function that returns the value at the given index in the iterable. + + Args: + value (int): The index value. + + Returns: + Callable: A function that returns the value at the given index. + """ + + def get_index(iterable: List[Any]): + return iterable[value] if iterable else iterable + + return get_index + + def get_function(self): + """ + Gets the function associated with the custom component. + + Returns: + Callable: The function associated with the custom component. 
+ """ + return validate.create_function(self.code, self.function_entrypoint_name) + + async def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> "Graph": + if not self._user_id: + raise ValueError("Session is invalid") + return await load_flow(user_id=self._user_id, flow_id=flow_id, tweaks=tweaks) + + async def run_flow( + self, + inputs: Optional[Union[dict, List[dict]]] = None, + flow_id: Optional[str] = None, + flow_name: Optional[str] = None, + tweaks: Optional[dict] = None, + ) -> Any: + return await run_flow(inputs=inputs, flow_id=flow_id, flow_name=flow_name, tweaks=tweaks, user_id=self._user_id) + + def list_flows(self) -> List[Record]: + if not self._user_id: + raise ValueError("Session is invalid") + try: + return list_flows(user_id=self._user_id) + except Exception as e: + raise ValueError(f"Error listing flows: {e}") + + def build(self, *args: Any, **kwargs: Any) -> Any: + """ + Builds the custom component. + + Args: + *args: The positional arguments. + **kwargs: The keyword arguments. + + Returns: + Any: The result of the build process. 
+ """ + raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/directory_reader/__init__.py b/src/backend/base/langflow/interface/custom/directory_reader/__init__.py similarity index 100% rename from src/backend/langflow/interface/custom/directory_reader/__init__.py rename to src/backend/base/langflow/interface/custom/directory_reader/__init__.py diff --git a/src/backend/langflow/interface/custom/directory_reader/directory_reader.py b/src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py similarity index 86% rename from src/backend/langflow/interface/custom/directory_reader/directory_reader.py rename to src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py index 7bf55bec8..e9f3f6ceb 100644 --- a/src/backend/langflow/interface/custom/directory_reader/directory_reader.py +++ b/src/backend/base/langflow/interface/custom/directory_reader/directory_reader.py @@ -1,9 +1,11 @@ import ast import os import zlib +from pathlib import Path + +from loguru import logger from langflow.interface.custom.custom_component import CustomComponent -from loguru import logger class CustomComponentPathValueError(ValueError): @@ -76,7 +78,7 @@ class DirectoryReader: component_tuple = (*build_component(component), component) components.append(component_tuple) except Exception as e: - logger.error(f"Error while loading component: {e}") + logger.error(f"Error while loading component { component['name']}: {e}") continue items.append({"name": menu["name"], "path": menu["path"], "components": components}) filtered = [menu for menu in items if menu["components"]] @@ -105,8 +107,15 @@ class DirectoryReader: """ if not os.path.isfile(file_path): return None - with open(file_path, "r") as file: - return file.read() + with open(file_path, "r", encoding="utf-8") as file: + # UnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 3069: character maps to + try: + return file.read() + except UnicodeDecodeError: + # 
This is happening in Windows, so we need to open the file in binary mode + # The file is always just a python file, so we can safely read it as utf-8 + with open(file_path, "rb") as file: + return file.read().decode("utf-8") def get_files(self): """ @@ -116,12 +125,15 @@ class DirectoryReader: raise CustomComponentPathValueError(f"The path needs to start with '{self.base_path}'.") file_list = [] - for root, _, files in os.walk(safe_path): - file_list.extend( - os.path.join(root, filename) - for filename in files - if filename.endswith(".py") and not filename.startswith("__") - ) + safe_path_obj = Path(safe_path) + for file_path in safe_path_obj.rglob("*.py"): + # The other condtion is that it should be + # in the safe_path/[folder]/[file].py format + # any folders below [folder] will be ignored + # basically the parent folder of the file should be a + # folder in the safe_path + if file_path.is_file() and file_path.parent.parent == safe_path_obj and not file_path.name.startswith("__"): + file_list.append(str(file_path)) return file_list def find_menu(self, response, menu_name): @@ -194,7 +206,12 @@ class DirectoryReader: Process a file by validating its content and returning the result and content/error message. 
""" - file_content = self.read_file_content(file_path) + try: + file_content = self.read_file_content(file_path) + except Exception as exc: + logger.exception(exc) + logger.error(f"Error while reading file {file_path}: {str(exc)}") + return False, f"Could not read {file_path}" if file_content is None: return False, f"Could not read {file_path}" @@ -226,10 +243,10 @@ class DirectoryReader: for file_path in file_paths: menu_name = os.path.basename(os.path.dirname(file_path)) - logger.debug(f"Menu name: {menu_name}") filename = os.path.basename(file_path) validation_result, result_content = self.process_file(file_path) - logger.debug(f"Validation result: {validation_result}") + if not validation_result: + logger.error(f"Error while processing file {file_path}") menu_result = self.find_menu(response, menu_name) or { "name": menu_name, @@ -256,7 +273,7 @@ class DirectoryReader: output_types = [component_name_camelcase] component_info = { - "name": "CustomComponent", + "name": component_name_camelcase, "output_types": output_types, "file": filename, "code": result_content if validation_result else "", @@ -264,7 +281,6 @@ class DirectoryReader: } menu_result["components"].append(component_info) - logger.debug(f"Component info: {component_info}") if menu_result not in response["menu"]: response["menu"].append(menu_result) logger.debug("-------------------- Component menu list built --------------------") @@ -277,5 +293,6 @@ class DirectoryReader: """ custom_component = CustomComponent(code=code) types_list = custom_component.get_function_entrypoint_return_type + # Get the name of types classes return [type_.__name__ for type_ in types_list if hasattr(type_, "__name__")] diff --git a/src/backend/langflow/interface/custom/directory_reader/utils.py b/src/backend/base/langflow/interface/custom/directory_reader/utils.py similarity index 91% rename from src/backend/langflow/interface/custom/directory_reader/utils.py rename to 
src/backend/base/langflow/interface/custom/directory_reader/utils.py index f1771b2d7..2772cb78c 100644 --- a/src/backend/langflow/interface/custom/directory_reader/utils.py +++ b/src/backend/base/langflow/interface/custom/directory_reader/utils.py @@ -1,6 +1,7 @@ +from loguru import logger + from langflow.interface.custom.directory_reader import DirectoryReader from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode -from loguru import logger def merge_nested_dicts_with_renaming(dict1, dict2): @@ -124,21 +125,23 @@ def get_new_key(dictionary, original_key): def determine_component_name(component): """Determine the name of the component.""" - component_output_types = component["output_types"] - if len(component_output_types) == 1: - return component_output_types[0] - else: - file_name = component.get("file").split(".")[0] - return "".join(word.capitalize() for word in file_name.split("_")) if "_" in file_name else file_name + # component_output_types = component["output_types"] + # if len(component_output_types) == 1: + # return component_output_types[0] + # else: + # file_name = component.get("file").split(".")[0] + # return "".join(word.capitalize() for word in file_name.split("_")) if "_" in file_name else file_name + return component["name"] def build_menu_items(menu_item): """Build menu items for a given menu.""" menu_items = {} + logger.debug(f"Building menu items for {menu_item['name']}") + logger.debug(f"Loading {len(menu_item['components'])} components") for component_name, component_template, component in menu_item["components"]: try: menu_items[component_name] = component_template - logger.debug(f"Added {component_name} to valid menu.") except Exception as exc: logger.error(f"Error loading Component: {component['output_types']}") logger.exception(f"Error while building custom component {component['output_types']}: {exc}") diff --git a/src/backend/base/langflow/interface/custom/eval.py 
b/src/backend/base/langflow/interface/custom/eval.py new file mode 100644 index 000000000..b36f10d92 --- /dev/null +++ b/src/backend/base/langflow/interface/custom/eval.py @@ -0,0 +1,12 @@ +from typing import TYPE_CHECKING, Type + +from langflow.utils import validate + +if TYPE_CHECKING: + from langflow.interface.custom.custom_component import CustomComponent + + +def eval_custom_component_code(code: str) -> Type["CustomComponent"]: + """Evaluate custom component code""" + class_name = validate.extract_class_name(code) + return validate.create_class(code, class_name) diff --git a/src/backend/langflow/interface/custom/schema.py b/src/backend/base/langflow/interface/custom/schema.py similarity index 80% rename from src/backend/langflow/interface/custom/schema.py rename to src/backend/base/langflow/interface/custom/schema.py index 7c5975150..1636882ef 100644 --- a/src/backend/langflow/interface/custom/schema.py +++ b/src/backend/base/langflow/interface/custom/schema.py @@ -27,3 +27,12 @@ class CallableCodeDetails(BaseModel): body: list return_type: Optional[Any] = None has_return: bool = False + + +class MissingDefault: + """ + A class to represent a missing default value. 
+ """ + + def __repr__(self): + return "MISSING" diff --git a/src/backend/langflow/interface/custom/utils.py b/src/backend/base/langflow/interface/custom/utils.py similarity index 66% rename from src/backend/langflow/interface/custom/utils.py rename to src/backend/base/langflow/interface/custom/utils.py index 2511c08ca..2935a363a 100644 --- a/src/backend/langflow/interface/custom/utils.py +++ b/src/backend/base/langflow/interface/custom/utils.py @@ -3,11 +3,15 @@ import contextlib import re import traceback import warnings -from typing import Any, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Tuple, Union from uuid import UUID from fastapi import HTTPException +from loguru import logger +from pydantic import BaseModel + from langflow.field_typing.range_spec import RangeSpec +from langflow.interface.custom.attributes import ATTR_FUNC_MAPPING from langflow.interface.custom.code_parser.utils import extract_inner_type from langflow.interface.custom.custom_component import CustomComponent from langflow.interface.custom.directory_reader.utils import ( @@ -15,13 +19,17 @@ from langflow.interface.custom.directory_reader.utils import ( determine_component_name, merge_nested_dicts_with_renaming, ) -from langflow.interface.importing.utils import eval_custom_component_code +from langflow.interface.custom.eval import eval_custom_component_code +from langflow.interface.custom.schema import MissingDefault +from langflow.schema import dotdict from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.custom_components import ( - CustomComponentFrontendNode, -) +from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode +from langflow.utils import validate from langflow.utils.util import get_base_classes -from loguru import logger + + +class UpdateBuildConfigError(Exception): + pass def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: List[str]): @@ -35,7 +43,9 @@ 
def add_output_types(frontend_node: CustomComponentFrontendNode, return_types: L "traceback": traceback.format_exc(), }, ) - if hasattr(return_type, "__name__"): + if return_type == str: + return_type = "Text" + elif hasattr(return_type, "__name__"): return_type = return_type.__name__ elif hasattr(return_type, "__class__"): return_type = return_type.__class__.__name__ @@ -58,6 +68,7 @@ def reorder_fields(frontend_node: CustomComponentFrontendNode, field_order: List if field.name not in field_order: reordered_fields.append(field) frontend_node.template.fields = reordered_fields + frontend_node.field_order = field_order def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: List[str]): @@ -73,6 +84,8 @@ def add_base_classes(frontend_node: CustomComponentFrontendNode, return_types: L ) base_classes = get_base_classes(return_type_instance) + if return_type_instance == str: + base_classes.append("Text") for base_class in base_classes: frontend_node.add_base_class(base_class) @@ -89,7 +102,7 @@ def extract_type_from_optional(field_type): str: The extracted type, or an empty string if no type was found. 
""" match = re.search(r"\[(.*?)\]$", field_type) - return match[1] if match else None + return match[1] if match else field_type def get_field_properties(extra_field): @@ -97,7 +110,11 @@ def get_field_properties(extra_field): field_name = extra_field["name"] field_type = extra_field.get("type", "str") field_value = extra_field.get("default", "") - field_required = "optional" not in field_type.lower() + # a required field is a field that does not contain + # optional in field_type + # and a field that does not have a default value + field_required = "optional" not in field_type.lower() and isinstance(field_value, MissingDefault) + field_value = field_value if not isinstance(field_value, MissingDefault) else None if not field_required: field_type = extract_type_from_optional(field_type) @@ -110,7 +127,13 @@ def get_field_properties(extra_field): def process_type(field_type: str): if field_type.startswith("list") or field_type.startswith("List"): return extract_inner_type(field_type) - return "prompt" if field_type == "Prompt" else field_type + + # field_type is a string can be Prompt or Code too + # so we just need to lower if it is the case + lowercase_type = field_type.lower() + if lowercase_type in ["prompt", "code"]: + return lowercase_type + return field_type def add_new_custom_field( @@ -130,13 +153,16 @@ def add_new_custom_field( field_value = field_config.pop("value", field_value) field_advanced = field_config.pop("advanced", False) + if field_type == "Dict": + field_type = "dict" + if field_type == "bool" and field_value is None: field_value = False # If options is a list, then it's a dropdown # If options is None, then it's a list of strings is_list = isinstance(field_config.get("options"), list) - field_config["is_list"] = is_list or field_config.get("is_list", False) or field_contains_list + field_config["is_list"] = is_list or field_config.get("list", False) or field_contains_list if "name" in field_config: warnings.warn("The 'name' key in field_config 
is used to build the object and can't be changed.") @@ -165,13 +191,21 @@ def add_extra_fields(frontend_node, field_config, function_args): """Add extra fields to the frontend node""" if not function_args: return + _field_config = field_config.copy() + function_args_names = [arg["name"] for arg in function_args] + # If kwargs is in the function_args and not all field_config keys are in function_args + # then we need to add the extra fields for extra_field in function_args: - if "name" not in extra_field or extra_field["name"] == "self": + if "name" not in extra_field or extra_field["name"] in [ + "self", + "kwargs", + "args", + ]: continue field_name, field_type, field_value, field_required = get_field_properties(extra_field) - config = field_config.get(field_name, {}) + config = _field_config.pop(field_name, {}) frontend_node = add_new_custom_field( frontend_node, field_name, @@ -180,25 +214,39 @@ def add_extra_fields(frontend_node, field_config, function_args): field_required, config, ) + if "kwargs" in function_args_names and not all(key in function_args_names for key in field_config.keys()): + for field_name, field_config in _field_config.copy().items(): + if "name" not in field_config or field_name == "code": + continue + config = _field_config.get(field_name, {}) + config = config.model_dump() if isinstance(config, BaseModel) else config + field_name, field_type, field_value, field_required = get_field_properties(extra_field=config) + frontend_node = add_new_custom_field( + frontend_node, + field_name, + field_type, + field_value, + field_required, + config, + ) def get_field_dict(field: Union[TemplateField, dict]): """Get the field dictionary from a TemplateField or a dict""" if isinstance(field, TemplateField): - return field.model_dump(by_alias=True, exclude_none=True) + return dotdict(field.model_dump(by_alias=True, exclude_none=True)) return field def run_build_config( custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None, - 
update_field=None, -): +) -> Tuple[dict, CustomComponent]: """Build the field configuration for a custom component""" try: if custom_component.code is None: - return {} + raise ValueError("Code is None") elif isinstance(custom_component.code, str): custom_class = eval_custom_component_code(custom_component.code) else: @@ -217,43 +265,30 @@ def run_build_config( custom_instance = custom_class(user_id=user_id) build_config: Dict = custom_instance.build_config() - for field_name, field in build_config.items(): + for field_name, field in build_config.copy().items(): # Allow user to build TemplateField as well # as a dict with the same keys as TemplateField field_dict = get_field_dict(field) - if update_field is not None and field_name != update_field: - continue - try: - update_field_dict(field_dict) - build_config[field_name] = field_dict - except Exception as exc: - logger.error(f"Error while getting build_config: {str(exc)}") + # Let's check if "rangeSpec" is a RangeSpec object + if "rangeSpec" in field_dict and isinstance(field_dict["rangeSpec"], RangeSpec): + field_dict["rangeSpec"] = field_dict["rangeSpec"].model_dump() + build_config[field_name] = field_dict return build_config, custom_instance except Exception as exc: logger.error(f"Error while building field config: {str(exc)}") - raise HTTPException( - status_code=400, - detail={ - "error": ("Invalid type convertion. 
Please check your code and try again."), - "traceback": traceback.format_exc(), - }, - ) from exc + if hasattr(exc, "detail") and "traceback" in exc.detail: + logger.error(exc.detail["traceback"]) + + raise exc def sanitize_template_config(template_config): """Sanitize the template config""" - attributes = { - "display_name", - "description", - "beta", - "documentation", - "output_types", - "icon", - } + for key in template_config.copy(): - if key not in attributes: + if key not in ATTR_FUNC_MAPPING.keys(): template_config.pop(key, None) return template_config @@ -278,7 +313,7 @@ def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code, field_c value=raw_code, password=False, name="code", - advanced=field_config.pop("advanced", False), + advanced=True, field_type="code", is_list=False, ) @@ -290,38 +325,35 @@ def add_code_field(frontend_node: CustomComponentFrontendNode, raw_code, field_c def build_custom_component_template( custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None, - update_field: Optional[str] = None, -) -> Optional[Dict[str, Any]]: +) -> Tuple[Dict[str, Any], CustomComponent]: """Build a custom component template for the langchain""" try: - logger.debug("Building custom component template") frontend_node = build_frontend_node(custom_component.template_config) - logger.debug("Built base frontend node") + field_config, custom_instance = run_build_config( + custom_component, + user_id=user_id, + ) - logger.debug("Updated attributes") - field_config, custom_instance = run_build_config(custom_component, user_id=user_id, update_field=update_field) - logger.debug("Built field config") entrypoint_args = custom_component.get_function_entrypoint_args add_extra_fields(frontend_node, field_config, entrypoint_args) - logger.debug("Added extra fields") + frontend_node = add_code_field(frontend_node, custom_component.code, field_config.get("code", {})) - logger.debug("Added code field") + add_base_classes(frontend_node, 
custom_component.get_function_entrypoint_return_type) add_output_types(frontend_node, custom_component.get_function_entrypoint_return_type) - logger.debug("Added base classes") reorder_fields(frontend_node, custom_instance._get_field_order()) - return frontend_node.to_dict(add_name=False) + return frontend_node.to_dict(add_name=False), custom_instance except Exception as exc: if isinstance(exc, HTTPException): raise exc raise HTTPException( status_code=400, detail={ - "error": ("Invalid type convertion. Please check your code and try again."), + "error": (f"Something went wrong while building the custom component. Hints: {str(exc)}"), "traceback": traceback.format_exc(), }, ) from exc @@ -334,20 +366,22 @@ def create_component_template(component): component_extractor = CustomComponent(code=component_code) - component_template = build_custom_component_template(component_extractor) - component_template["output_types"] = component_output_types + component_template, _ = build_custom_component_template(component_extractor) + if not component_template["output_types"] and component_output_types: + component_template["output_types"] = component_output_types + return component_template -def build_custom_components(settings_service): +def build_custom_components(components_paths: List[str]): """Build custom components from the specified paths.""" - if not settings_service.settings.COMPONENTS_PATH: + if not components_paths: return {} - logger.info(f"Building custom components from {settings_service.settings.COMPONENTS_PATH}") - custom_components_from_file = {} + logger.info(f"Building custom components from {components_paths}") + custom_components_from_file: dict = {} processed_paths = set() - for path in settings_service.settings.COMPONENTS_PATH: + for path in components_paths: path_str = str(path) if path_str in processed_paths: continue @@ -364,24 +398,43 @@ def build_custom_components(settings_service): return custom_components_from_file -def 
update_field_dict(field_dict): +def update_field_dict( + custom_component_instance: "CustomComponent", + field_dict: Dict, + build_config: Dict, + update_field: Optional[str] = None, + update_field_value: Optional[Any] = None, + call: bool = False, +): """Update the field dictionary by calling options() or value() if they are callable""" - if "options" in field_dict and callable(field_dict["options"]): - field_dict["options"] = field_dict["options"]() - # Also update the "refresh" key - field_dict["refresh"] = True + if ("real_time_refresh" in field_dict or "refresh_button" in field_dict) and any( + ( + field_dict.get("real_time_refresh", False), + field_dict.get("refresh_button", False), + ) + ): + if call: + try: + dd_build_config = dotdict(build_config) + custom_component_instance.update_build_config( + build_config=dd_build_config, + field_value=update_field, + field_name=update_field_value, + ) + build_config = dd_build_config + except Exception as exc: + logger.error(f"Error while running update_build_config: {str(exc)}") + raise UpdateBuildConfigError(f"Error while running update_build_config: {str(exc)}") from exc - if "value" in field_dict and callable(field_dict["value"]): - field_dict["value"] = field_dict["value"]() - field_dict["refresh"] = True - - # Let's check if "range_spec" is a RangeSpec object - if "rangeSpec" in field_dict and isinstance(field_dict["rangeSpec"], RangeSpec): - field_dict["rangeSpec"] = field_dict["rangeSpec"].model_dump() + return build_config -def sanitize_field_config(field_config: Dict): +def sanitize_field_config(field_config: Union[Dict, TemplateField]): # If any of the already existing keys are in field_config, remove them + if isinstance(field_config, TemplateField): + field_dict = field_config.to_dict() + else: + field_dict = field_config for key in [ "name", "field_type", @@ -392,13 +445,20 @@ def sanitize_field_config(field_config: Dict): "advanced", "show", ]: - field_config.pop(key, None) - return field_config + 
field_dict.pop(key, None) + return field_dict def build_component(component): """Build a single component.""" - logger.debug(f"Building component: {component.get('name'), component.get('output_types')}") component_name = determine_component_name(component) component_template = create_component_template(component) + return component_name, component_template + + +def get_function(code): + """Get the function""" + function_name = validate.extract_function_name(code) + + return validate.create_function(code, function_name) diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/base/langflow/interface/custom_lists.py similarity index 100% rename from src/backend/langflow/interface/custom_lists.py rename to src/backend/base/langflow/interface/custom_lists.py index 3cff26099..8f428ee11 100644 --- a/src/backend/langflow/interface/custom_lists.py +++ b/src/backend/base/langflow/interface/custom_lists.py @@ -2,8 +2,8 @@ import inspect from typing import Any from langchain import llms, memory, requests, text_splitter -from langchain_community.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI from langchain_community import agent_toolkits, document_loaders, embeddings +from langchain_community.chat_models import AzureChatOpenAI, ChatAnthropic, ChatOpenAI, ChatVertexAI from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.chains.custom import CUSTOM_CHAINS diff --git a/src/backend/langflow/services/plugins/__init__.py b/src/backend/base/langflow/interface/document_loaders/__init__.py similarity index 100% rename from src/backend/langflow/services/plugins/__init__.py rename to src/backend/base/langflow/interface/document_loaders/__init__.py diff --git a/src/backend/langflow/interface/document_loaders/base.py b/src/backend/base/langflow/interface/document_loaders/base.py similarity index 100% rename from src/backend/langflow/interface/document_loaders/base.py rename to 
src/backend/base/langflow/interface/document_loaders/base.py index 6142d2fa2..24108c9b4 100644 --- a/src/backend/langflow/interface/document_loaders/base.py +++ b/src/backend/base/langflow/interface/document_loaders/base.py @@ -1,11 +1,11 @@ from typing import Dict, List, Optional, Type +from loguru import logger + from langflow.interface.base import LangChainTypeCreator +from langflow.interface.custom_lists import documentloaders_type_to_cls_dict from langflow.services.deps import get_settings_service from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode -from langflow.interface.custom_lists import documentloaders_type_to_cls_dict - -from loguru import logger from langflow.utils.util import build_template_from_class diff --git a/src/backend/langflow/services/session/__init__.py b/src/backend/base/langflow/interface/embeddings/__init__.py similarity index 100% rename from src/backend/langflow/services/session/__init__.py rename to src/backend/base/langflow/interface/embeddings/__init__.py diff --git a/src/backend/langflow/interface/embeddings/base.py b/src/backend/base/langflow/interface/embeddings/base.py similarity index 100% rename from src/backend/langflow/interface/embeddings/base.py rename to src/backend/base/langflow/interface/embeddings/base.py index 834ea61fa..061321092 100644 --- a/src/backend/langflow/interface/embeddings/base.py +++ b/src/backend/base/langflow/interface/embeddings/base.py @@ -1,12 +1,12 @@ from typing import Dict, List, Optional, Type +from loguru import logger + from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import embedding_type_to_cls_dict from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_class diff --git 
a/src/backend/langflow/interface/importing/__init__.py b/src/backend/base/langflow/interface/importing/__init__.py similarity index 54% rename from src/backend/langflow/interface/importing/__init__.py rename to src/backend/base/langflow/interface/importing/__init__.py index 317849f8e..066653f49 100644 --- a/src/backend/langflow/interface/importing/__init__.py +++ b/src/backend/base/langflow/interface/importing/__init__.py @@ -1,5 +1,3 @@ -from langflow.interface.importing.utils import import_by_type # noqa: F401 - # This module is used to import any langchain class by name. ALL = [ diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/base/langflow/interface/importing/utils.py similarity index 86% rename from src/backend/langflow/interface/importing/utils.py rename to src/backend/base/langflow/interface/importing/utils.py index 9d7305f16..24612df3c 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/base/langflow/interface/importing/utils.py @@ -9,9 +9,8 @@ from langchain.chains.base import Chain from langchain.prompts import PromptTemplate from langchain.tools import BaseTool from langchain_core.language_models.chat_models import BaseChatModel -from langflow.interface.custom.custom_component import CustomComponent + from langflow.interface.wrappers.base import wrapper_creator -from langflow.utils import validate def import_module(module_path: str) -> Any: @@ -35,7 +34,7 @@ def import_by_type(_type: str, name: str) -> Any: func_dict = { "agents": import_agent, "prompts": import_prompt, - "llms": {"llm": import_llm, "chat": import_chat_llm}, + "models": {"llm": import_llm, "chat": import_chat_llm}, "tools": import_tool, "chains": import_chain, "toolkits": import_toolkit, @@ -48,9 +47,8 @@ def import_by_type(_type: str, name: str) -> Any: "utilities": import_utility, "output_parsers": import_output_parser, "retrievers": import_retriever, - "custom_components": import_custom_component, } - if _type == "llms": + if 
_type == "models": key = "chat" if "chat" in name.lower() else "llm" loaded_func = func_dict[_type][key] # type: ignore else: @@ -59,11 +57,6 @@ def import_by_type(_type: str, name: str) -> Any: return loaded_func(name) -def import_custom_component(custom_component: str) -> CustomComponent: - """Import custom component from custom component name""" - return import_class("langflow.interface.custom.custom_component.CustomComponent") - - def import_output_parser(output_parser: str) -> Any: """Import output parser from output parser name""" return import_module(f"from langchain.output_parsers import {output_parser}") @@ -171,16 +164,3 @@ def import_utility(utility: str) -> Any: if utility == "SQLDatabase": return import_class(f"langchain_community.sql_database.{utility}") return import_class(f"langchain_community.utilities.{utility}") - - -def get_function(code): - """Get the function""" - function_name = validate.extract_function_name(code) - - return validate.create_function(code, function_name) - - -def eval_custom_component_code(code: str) -> Type[CustomComponent]: - """Evaluate custom component code""" - class_name = validate.extract_class_name(code) - return validate.create_class(code, class_name) diff --git a/src/backend/langflow/services/store/__init__.py b/src/backend/base/langflow/interface/initialize/__init__.py similarity index 100% rename from src/backend/langflow/services/store/__init__.py rename to src/backend/base/langflow/interface/initialize/__init__.py diff --git a/src/backend/langflow/interface/initialize/llm.py b/src/backend/base/langflow/interface/initialize/llm.py similarity index 100% rename from src/backend/langflow/interface/initialize/llm.py rename to src/backend/base/langflow/interface/initialize/llm.py diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/base/langflow/interface/initialize/loading.py similarity index 84% rename from src/backend/langflow/interface/initialize/loading.py rename to 
src/backend/base/langflow/interface/initialize/loading.py index 652cd1a0c..15045cd06 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/base/langflow/interface/initialize/loading.py @@ -2,6 +2,7 @@ import inspect import json from typing import TYPE_CHECKING, Any, Callable, Dict, Sequence, Type + import orjson from langchain.agents import agent as agent_module from langchain.agents.agent import AgentExecutor @@ -14,8 +15,8 @@ from langchain_core.documents import Document from loguru import logger from pydantic import ValidationError -from langflow.interface.custom_lists import CUSTOM_NODES -from langflow.interface.importing.utils import eval_custom_component_code, get_function, import_by_type +from langflow.interface.custom.eval import eval_custom_component_code +from langflow.interface.importing.utils import import_by_type from langflow.interface.initialize.llm import initialize_vertexai from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type, handle_partial_variables from langflow.interface.initialize.vector_store import vecstore_initializer @@ -24,32 +25,49 @@ from langflow.interface.retrievers.base import retriever_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.utils import load_file_into_dict from langflow.interface.wrappers.base import wrapper_creator +from langflow.schema.schema import Record from langflow.utils import validate +from langflow.utils.util import unescape_string if TYPE_CHECKING: - from langflow import CustomComponent - - -def build_vertex_in_params(params: Dict) -> Dict: + from langflow.custom import CustomComponent from langflow.graph.vertex.base import Vertex - # If any of the values in params is a Vertex, we will build it - return {key: value.build() if isinstance(value, Vertex) else value for key, value in params.items()} - -async def instantiate_class(node_type: str, base_type: str, params: Dict, user_id=None) -> Any: +async def 
instantiate_class( + vertex: "Vertex", + user_id=None, +) -> Any: """Instantiate class from module type and key, and params""" + from langflow.interface.custom_lists import CUSTOM_NODES + + vertex_type = vertex.vertex_type + base_type = vertex.base_type + params = vertex.params params = convert_params_to_sets(params) params = convert_kwargs(params) - if node_type in CUSTOM_NODES: - if custom_node := CUSTOM_NODES.get(node_type): + if vertex_type in CUSTOM_NODES: + if custom_node := CUSTOM_NODES.get(vertex_type): if hasattr(custom_node, "initialize"): return custom_node.initialize(**params) - return custom_node(**params) - logger.debug(f"Instantiating {node_type} of type {base_type}") - class_object = import_by_type(_type=base_type, name=node_type) - return await instantiate_based_on_type(class_object, base_type, node_type, params, user_id=user_id) + if callable(custom_node): + return custom_node(**params) + raise ValueError(f"Custom node {vertex_type} is not callable") + logger.debug(f"Instantiating {vertex_type} of type {base_type}") + if not base_type: + raise ValueError("No base type provided for vertex") + if base_type == "custom_components": + return await instantiate_custom_component(params, user_id, vertex) + class_object = import_by_type(_type=base_type, name=vertex_type) + return await instantiate_based_on_type( + class_object=class_object, + base_type=base_type, + node_type=vertex_type, + params=params, + user_id=user_id, + vertex=vertex, + ) def convert_params_to_sets(params): @@ -76,7 +94,14 @@ def convert_kwargs(params): return params -async def instantiate_based_on_type(class_object, base_type, node_type, params, user_id): +async def instantiate_based_on_type( + class_object, + base_type, + node_type, + params, + user_id, + vertex, +): if base_type == "agents": return instantiate_agent(node_type, class_object, params) elif base_type == "prompts": @@ -103,24 +128,49 @@ async def instantiate_based_on_type(class_object, base_type, node_type, params, 
return instantiate_chains(node_type, class_object, params) elif base_type == "output_parsers": return instantiate_output_parser(node_type, class_object, params) - elif base_type == "llms": + elif base_type == "models": return instantiate_llm(node_type, class_object, params) elif base_type == "retrievers": return instantiate_retriever(node_type, class_object, params) elif base_type == "memory": return instantiate_memory(node_type, class_object, params) elif base_type == "custom_components": - return await instantiate_custom_component(node_type, class_object, params, user_id) + return await instantiate_custom_component( + params, + user_id, + vertex, + ) elif base_type == "wrappers": return instantiate_wrapper(node_type, class_object, params) else: return class_object(**params) -async def instantiate_custom_component(node_type, class_object, params, user_id): +def update_params_with_load_from_db_fields(custom_component: "CustomComponent", params, load_from_db_fields): + # For each field in load_from_db_fields, we will check if it's in the params + # and if it is, we will get the value from the custom_component.keys(name) + # and update the params with the value + for field in load_from_db_fields: + if field in params: + try: + key = custom_component.variables(params[field]) + params[field] = key if key else params[field] + except Exception as exc: + logger.error(f"Failed to get value for {field} from custom component. 
Error: {exc}") + pass + return params + + +async def instantiate_custom_component(params, user_id, vertex): params_copy = params.copy() - class_object: "CustomComponent" = eval_custom_component_code(params_copy.pop("code")) - custom_component = class_object(user_id=user_id) + class_object: Type["CustomComponent"] = eval_custom_component_code(params_copy.pop("code")) + custom_component: "CustomComponent" = class_object( + user_id=user_id, + parameters=params_copy, + vertex=vertex, + selected_output_type=vertex.selected_output_type, + ) + params_copy = update_params_with_load_from_db_fields(custom_component, params_copy, vertex.load_from_db_fields) if "retriever" in params_copy and hasattr(params_copy["retriever"], "as_retriever"): params_copy["retriever"] = params_copy["retriever"].as_retriever() @@ -130,12 +180,16 @@ async def instantiate_custom_component(node_type, class_object, params, user_id) if is_async: # Await the build method directly if it's async - built_object = await custom_component.build(**params_copy) + build_result = await custom_component.build(**params_copy) else: # Call the build method directly if it's sync - built_object = custom_component.build(**params_copy) - - return built_object, {"repr": custom_component.custom_repr()} + build_result = custom_component.build(**params_copy) + custom_repr = custom_component.custom_repr() + if not custom_repr and isinstance(build_result, (dict, Record, str)): + custom_repr = build_result + if not isinstance(custom_repr, str): + custom_repr = str(custom_repr) + return custom_component, build_result, {"repr": custom_repr} def instantiate_wrapper(node_type, class_object, params): @@ -252,6 +306,8 @@ def instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict): raise ValueError("Invalid file") return class_object(**params) elif node_type == "PythonFunctionTool": + from langflow.interface.custom.utils import get_function + params["func"] = get_function(params.get("code")) return 
class_object(**params) elif node_type == "PythonFunction": @@ -367,7 +423,10 @@ def instantiate_textsplitter( # separators might come in as an escaped string like \\n # so we need to convert it to a string if "separators" in params: - params["separators"] = params["separators"].encode().decode("unicode-escape") + if isinstance(params["separators"], str): + params["separators"] = unescape_string(params["separators"]) + elif isinstance(params["separators"], list): + params["separators"] = [unescape_string(separator) for separator in params["separators"]] text_splitter = class_object(**params) else: from langchain.text_splitter import Language diff --git a/src/backend/langflow/interface/initialize/utils.py b/src/backend/base/langflow/interface/initialize/utils.py similarity index 100% rename from src/backend/langflow/interface/initialize/utils.py rename to src/backend/base/langflow/interface/initialize/utils.py diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/base/langflow/interface/initialize/vector_store.py similarity index 100% rename from src/backend/langflow/interface/initialize/vector_store.py rename to src/backend/base/langflow/interface/initialize/vector_store.py diff --git a/src/backend/langflow/interface/listing.py b/src/backend/base/langflow/interface/listing.py similarity index 89% rename from src/backend/langflow/interface/listing.py rename to src/backend/base/langflow/interface/listing.py index 79a7335d4..a831f1098 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/base/langflow/interface/listing.py @@ -21,7 +21,7 @@ class AllTypesDict(LazyLoadDictBase): from langflow.interface.types import get_all_types_dict settings_service = get_settings_service() - return get_all_types_dict(settings_service=settings_service) + return get_all_types_dict(settings_service.settings.COMPONENTS_PATH) lazy_load_dict = AllTypesDict() diff --git a/src/backend/langflow/interface/llms/__init__.py 
b/src/backend/base/langflow/interface/llms/__init__.py similarity index 100% rename from src/backend/langflow/interface/llms/__init__.py rename to src/backend/base/langflow/interface/llms/__init__.py diff --git a/src/backend/langflow/interface/llms/base.py b/src/backend/base/langflow/interface/llms/base.py similarity index 97% rename from src/backend/langflow/interface/llms/base.py rename to src/backend/base/langflow/interface/llms/base.py index 4b0654a1a..ba611da8d 100644 --- a/src/backend/langflow/interface/llms/base.py +++ b/src/backend/base/langflow/interface/llms/base.py @@ -1,16 +1,16 @@ from typing import Dict, List, Optional, Type +from loguru import logger + from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import llm_type_to_cls_dict from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.llms import LLMFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_class class LLMCreator(LangChainTypeCreator): - type_name: str = "llms" + type_name: str = "models" @property def frontend_node_class(self) -> Type[LLMFrontendNode]: diff --git a/src/backend/langflow/interface/memories/__init__.py b/src/backend/base/langflow/interface/memories/__init__.py similarity index 100% rename from src/backend/langflow/interface/memories/__init__.py rename to src/backend/base/langflow/interface/memories/__init__.py diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/base/langflow/interface/memories/base.py similarity index 97% rename from src/backend/langflow/interface/memories/base.py rename to src/backend/base/langflow/interface/memories/base.py index 7508d6d64..b63c7d3f0 100644 --- a/src/backend/langflow/interface/memories/base.py +++ b/src/backend/base/langflow/interface/memories/base.py @@ -1,14 +1,14 @@ from typing import ClassVar, Dict, List, Optional, Type +from loguru import logger + from langflow.interface.base import 
LangChainTypeCreator from langflow.interface.custom_lists import memory_type_to_cls_dict +from langflow.legacy_custom.customs import get_custom_nodes from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.memories import MemoryFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method -from langflow.custom.customs import get_custom_nodes class MemoryCreator(LangChainTypeCreator): diff --git a/src/backend/langflow/services/task/__init__.py b/src/backend/base/langflow/interface/output_parsers/__init__.py similarity index 100% rename from src/backend/langflow/services/task/__init__.py rename to src/backend/base/langflow/interface/output_parsers/__init__.py diff --git a/src/backend/langflow/interface/output_parsers/base.py b/src/backend/base/langflow/interface/output_parsers/base.py similarity index 99% rename from src/backend/langflow/interface/output_parsers/base.py rename to src/backend/base/langflow/interface/output_parsers/base.py index 3ae1e9d7b..503b47642 100644 --- a/src/backend/langflow/interface/output_parsers/base.py +++ b/src/backend/base/langflow/interface/output_parsers/base.py @@ -1,13 +1,12 @@ from typing import ClassVar, Dict, List, Optional, Type from langchain import output_parsers +from loguru import logger from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method diff --git a/src/backend/langflow/interface/prompts/__init__.py b/src/backend/base/langflow/interface/prompts/__init__.py similarity index 100% rename from src/backend/langflow/interface/prompts/__init__.py rename 
to src/backend/base/langflow/interface/prompts/__init__.py diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/base/langflow/interface/prompts/base.py similarity index 97% rename from src/backend/langflow/interface/prompts/base.py rename to src/backend/base/langflow/interface/prompts/base.py index 164ea2b10..2b448c362 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/base/langflow/interface/prompts/base.py @@ -1,14 +1,13 @@ from typing import Dict, List, Optional, Type from langchain import prompts +from loguru import logger -from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class +from langflow.legacy_custom.customs import get_custom_nodes from langflow.services.deps import get_settings_service - from langflow.template.frontend_node.prompts import PromptFrontendNode -from loguru import logger from langflow.utils.util import build_template_from_class diff --git a/src/backend/langflow/interface/prompts/custom.py b/src/backend/base/langflow/interface/prompts/custom.py similarity index 100% rename from src/backend/langflow/interface/prompts/custom.py rename to src/backend/base/langflow/interface/prompts/custom.py diff --git a/src/backend/langflow/services/task/backends/__init__.py b/src/backend/base/langflow/interface/retrievers/__init__.py similarity index 100% rename from src/backend/langflow/services/task/backends/__init__.py rename to src/backend/base/langflow/interface/retrievers/__init__.py diff --git a/src/backend/langflow/interface/retrievers/base.py b/src/backend/base/langflow/interface/retrievers/base.py similarity index 99% rename from src/backend/langflow/interface/retrievers/base.py rename to src/backend/base/langflow/interface/retrievers/base.py index a6813e76c..a0be56b9b 100644 --- a/src/backend/langflow/interface/retrievers/base.py +++ b/src/backend/base/langflow/interface/retrievers/base.py 
@@ -1,12 +1,13 @@ from typing import Any, ClassVar, Dict, List, Optional, Type from langchain_community import retrievers +from loguru import logger + from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class from langflow.services.deps import get_settings_service from langflow.template.frontend_node.retrievers import RetrieverFrontendNode from langflow.utils.util import build_template_from_class, build_template_from_method -from loguru import logger class RetrieverCreator(LangChainTypeCreator): diff --git a/src/backend/langflow/interface/run.py b/src/backend/base/langflow/interface/run.py similarity index 79% rename from src/backend/langflow/interface/run.py rename to src/backend/base/langflow/interface/run.py index 94cd922eb..aec602ac7 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/base/langflow/interface/run.py @@ -1,25 +1,19 @@ -from typing import Dict, Optional, Tuple, Union -from uuid import UUID +from typing import Dict, Tuple from loguru import logger from langflow.graph import Graph -async def build_sorted_vertices(data_graph, user_id: Optional[Union[str, UUID]] = None) -> Tuple[Graph, Dict]: +async def build_sorted_vertices(data_graph, flow_id: str) -> Tuple[Graph, Dict]: """ Build langchain object from data_graph. 
""" logger.debug("Building langchain object") - graph = Graph.from_payload(data_graph) - sorted_vertices = graph.topological_sort() - artifacts = {} - for vertex in sorted_vertices: - await vertex.build(user_id=user_id) - if vertex.artifacts: - artifacts.update(vertex.artifacts) - return graph, artifacts + graph = Graph.from_payload(data_graph, flow_id=flow_id) + + return graph, {} def get_memory_key(langchain_object): diff --git a/src/backend/langflow/interface/text_splitters/__init__.py b/src/backend/base/langflow/interface/text_splitters/__init__.py similarity index 100% rename from src/backend/langflow/interface/text_splitters/__init__.py rename to src/backend/base/langflow/interface/text_splitters/__init__.py diff --git a/src/backend/langflow/interface/text_splitters/base.py b/src/backend/base/langflow/interface/text_splitters/base.py similarity index 100% rename from src/backend/langflow/interface/text_splitters/base.py rename to src/backend/base/langflow/interface/text_splitters/base.py index 29c77a12f..497377cde 100644 --- a/src/backend/langflow/interface/text_splitters/base.py +++ b/src/backend/base/langflow/interface/text_splitters/base.py @@ -1,11 +1,11 @@ from typing import Dict, List, Optional, Type +from loguru import logger + from langflow.interface.base import LangChainTypeCreator +from langflow.interface.custom_lists import textsplitter_type_to_cls_dict from langflow.services.deps import get_settings_service from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode -from langflow.interface.custom_lists import textsplitter_type_to_cls_dict - -from loguru import logger from langflow.utils.util import build_template_from_class diff --git a/src/backend/langflow/template/__init__.py b/src/backend/base/langflow/interface/toolkits/__init__.py similarity index 100% rename from src/backend/langflow/template/__init__.py rename to src/backend/base/langflow/interface/toolkits/__init__.py diff --git 
a/src/backend/langflow/interface/toolkits/base.py b/src/backend/base/langflow/interface/toolkits/base.py similarity index 79% rename from src/backend/langflow/interface/toolkits/base.py rename to src/backend/base/langflow/interface/toolkits/base.py index 7c57579d1..efcfefaa4 100644 --- a/src/backend/langflow/interface/toolkits/base.py +++ b/src/backend/base/langflow/interface/toolkits/base.py @@ -1,12 +1,12 @@ +import warnings from typing import Callable, Dict, List, Optional from langchain.agents import agent_toolkits +from loguru import logger from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class, import_module from langflow.services.deps import get_settings_service - -from loguru import logger from langflow.utils.util import build_template_from_class @@ -30,13 +30,15 @@ class ToolkitCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: - settings_service = get_settings_service() - self.type_dict = { - toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}") - # if toolkit_name is not lower case it is a class - for toolkit_name in agent_toolkits.__all__ - if not toolkit_name.islower() and toolkit_name in settings_service.settings.TOOLKITS - } + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + settings_service = get_settings_service() + self.type_dict = { + toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}") + # if toolkit_name is not lower case it is a class + for toolkit_name in agent_toolkits.__all__ + if not toolkit_name.islower() and toolkit_name in settings_service.settings.TOOLKITS + } return self.type_dict diff --git a/src/backend/langflow/interface/toolkits/custom.py b/src/backend/base/langflow/interface/toolkits/custom.py similarity index 100% rename from src/backend/langflow/interface/toolkits/custom.py rename to src/backend/base/langflow/interface/toolkits/custom.py diff 
--git a/src/backend/langflow/interface/tools/__init__.py b/src/backend/base/langflow/interface/tools/__init__.py similarity index 100% rename from src/backend/langflow/interface/tools/__init__.py rename to src/backend/base/langflow/interface/tools/__init__.py diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/base/langflow/interface/tools/base.py similarity index 95% rename from src/backend/langflow/interface/tools/base.py rename to src/backend/base/langflow/interface/tools/base.py index 875915a58..b4c933baa 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/base/langflow/interface/tools/base.py @@ -1,27 +1,17 @@ from typing import Dict, List, Optional -from langchain.agents.load_tools import ( - _EXTRA_LLM_TOOLS, - _EXTRA_OPTIONAL_TOOLS, - _LLM_TOOLS, -) +from langchain.agents.load_tools import _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS -from langflow.custom import customs from langflow.interface.base import LangChainTypeCreator -from langflow.interface.tools.constants import ( - ALL_TOOLS_NAMES, - CUSTOM_TOOLS, - FILE_TOOLS, - OTHER_TOOLS, -) +from langflow.interface.tools.constants import ALL_TOOLS_NAMES, CUSTOM_TOOLS, FILE_TOOLS, OTHER_TOOLS from langflow.interface.tools.util import get_tool_params +from langflow.legacy_custom import customs from langflow.services.deps import get_settings_service - from langflow.template.field.base import TemplateField from langflow.template.template.base import Template from langflow.utils import util -from langflow.utils.util import build_template_from_class from langflow.utils.logger import logger +from langflow.utils.util import build_template_from_class TOOL_INPUTS = { "str": TemplateField( diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/base/langflow/interface/tools/constants.py similarity index 86% rename from src/backend/langflow/interface/tools/constants.py rename to src/backend/base/langflow/interface/tools/constants.py index 
0ac37a0a4..7b07922e5 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/base/langflow/interface/tools/constants.py @@ -2,14 +2,14 @@ from langchain import tools from langchain.agents import Tool from langchain.agents.load_tools import _BASE_TOOLS, _EXTRA_LLM_TOOLS, _EXTRA_OPTIONAL_TOOLS, _LLM_TOOLS from langchain.tools.json.tool import JsonSpec + from langflow.interface.importing.utils import import_class -from langflow.interface.tools.custom import PythonFunction, PythonFunctionTool +from langflow.interface.tools.custom import PythonFunctionTool FILE_TOOLS = {"JsonSpec": JsonSpec} CUSTOM_TOOLS = { "Tool": Tool, "PythonFunctionTool": PythonFunctionTool, - "PythonFunction": PythonFunction, } OTHER_TOOLS = {tool: import_class(f"langchain_community.tools.{tool}") for tool in tools.__all__} diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/base/langflow/interface/tools/custom.py similarity index 95% rename from src/backend/langflow/interface/tools/custom.py rename to src/backend/base/langflow/interface/tools/custom.py index 73f5842df..6ba8cac13 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/base/langflow/interface/tools/custom.py @@ -1,10 +1,10 @@ from typing import Callable, Optional -from langflow.interface.importing.utils import get_function +from langchain.agents.tools import Tool from pydantic.v1 import BaseModel, validator +from langflow.interface.custom.utils import get_function from langflow.utils import validate -from langchain.agents.tools import Tool class Function(BaseModel): diff --git a/src/backend/langflow/interface/tools/util.py b/src/backend/base/langflow/interface/tools/util.py similarity index 100% rename from src/backend/langflow/interface/tools/util.py rename to src/backend/base/langflow/interface/tools/util.py diff --git a/src/backend/langflow/interface/types.py b/src/backend/base/langflow/interface/types.py similarity index 75% rename from 
src/backend/langflow/interface/types.py rename to src/backend/base/langflow/interface/types.py index 39c68889f..32ab7ff95 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/base/langflow/interface/types.py @@ -9,12 +9,10 @@ from langflow.interface.embeddings.base import embedding_creator from langflow.interface.llms.base import llm_creator from langflow.interface.memories.base import memory_creator from langflow.interface.output_parsers.base import output_parser_creator -from langflow.interface.prompts.base import prompt_creator from langflow.interface.retrievers.base import retriever_creator from langflow.interface.text_splitters.base import textsplitter_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.tools.base import tool_creator -from langflow.interface.utilities.base import utility_creator from langflow.interface.wrappers.base import wrapper_creator @@ -39,7 +37,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union creators = [ chain_creator, agent_creator, - prompt_creator, + # prompt_creator, llm_creator, memory_creator, tool_creator, @@ -49,7 +47,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union # vectorstore_creator, documentloader_creator, textsplitter_creator, - utility_creator, + # utility_creator, output_parser_creator, retriever_creator, ] @@ -63,8 +61,22 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union return all_types -def get_all_types_dict(settings_service): +def get_all_types_dict(components_paths): """Get all types dictionary combining native and custom components.""" native_components = build_langchain_types_dict() - custom_components_from_file = build_custom_components(settings_service) + custom_components_from_file = build_custom_components(components_paths=components_paths) return merge_nested_dicts_with_renaming(native_components, custom_components_from_file) + + +def 
get_all_components(components_paths, as_dict=False): + """Get all components names combining native and custom components.""" + all_types_dict = get_all_types_dict(components_paths) + components = [] if not as_dict else {} + for category in all_types_dict.values(): + for component in category.values(): + component["name"] = component["display_name"] + if as_dict: + components[component["name"]] = component + else: + components.append(component) + return components diff --git a/src/backend/langflow/template/field/__init__.py b/src/backend/base/langflow/interface/utilities/__init__.py similarity index 100% rename from src/backend/langflow/template/field/__init__.py rename to src/backend/base/langflow/interface/utilities/__init__.py diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/base/langflow/interface/utilities/base.py similarity index 97% rename from src/backend/langflow/interface/utilities/base.py rename to src/backend/base/langflow/interface/utilities/base.py index 0a58ef1bc..1b7b69421 100644 --- a/src/backend/langflow/interface/utilities/base.py +++ b/src/backend/base/langflow/interface/utilities/base.py @@ -3,9 +3,9 @@ from typing import Dict, List, Optional, Type from langchain_community import utilities from loguru import logger -from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class +from langflow.legacy_custom.customs import get_custom_nodes from langflow.services.deps import get_settings_service from langflow.template.frontend_node.utilities import UtilitiesFrontendNode from langflow.utils.util import build_template_from_class diff --git a/src/backend/langflow/interface/utils.py b/src/backend/base/langflow/interface/utils.py similarity index 71% rename from src/backend/langflow/interface/utils.py rename to src/backend/base/langflow/interface/utils.py index ef29911f5..8e7f476f5 100644 --- 
a/src/backend/langflow/interface/utils.py +++ b/src/backend/base/langflow/interface/utils.py @@ -1,14 +1,14 @@ import base64 import json import os -from io import BytesIO import re - +from io import BytesIO import yaml from langchain.base_language import BaseLanguageModel -from PIL.Image import Image from loguru import logger +from PIL.Image import Image + from langflow.services.chat.config import ChatConfig from langflow.services.deps import get_settings_service @@ -56,8 +56,35 @@ def try_setting_streaming_options(langchain_object): def extract_input_variables_from_prompt(prompt: str) -> list[str]: - """Extract input variables from prompt.""" - return re.findall(r"{(.*?)}", prompt) + variables = [] + remaining_text = prompt + + # Pattern to match single {var} and double {{var}} braces. + pattern = r"\{\{(.*?)\}\}|\{([^{}]+)\}" + + while True: + match = re.search(pattern, remaining_text) + if not match: + break + + # Extract the variable name from either the single or double brace match + if match.group(1): # Match found in double braces + variable_name = "{{" + match.group(1) + "}}" # Re-add single braces for JSON strings + else: # Match found in single braces + variable_name = match.group(2) + if variable_name is not None: + # This means there is a match + # but there is nothing inside the braces + variables.append(variable_name) + + # Remove the matched text from the remaining_text + start, end = match.span() + remaining_text = remaining_text[:start] + remaining_text[end:] + + # Proceed to the next match until no more matches are found + # No need to compare remaining "{}" instances because we are re-adding braces for JSON compatibility + + return variables def setup_llm_caching(): @@ -73,6 +100,7 @@ def setup_llm_caching(): def set_langchain_cache(settings): from langchain.globals import set_llm_cache + from langflow.interface.importing.utils import import_class if cache_type := os.getenv("LANGFLOW_LANGCHAIN_CACHE"): diff --git 
a/src/backend/langflow/template/frontend_node/formatter/__init__.py b/src/backend/base/langflow/interface/vector_store/__init__.py similarity index 100% rename from src/backend/langflow/template/frontend_node/formatter/__init__.py rename to src/backend/base/langflow/interface/vector_store/__init__.py diff --git a/src/backend/langflow/interface/vector_store/base.py b/src/backend/base/langflow/interface/vector_store/base.py similarity index 100% rename from src/backend/langflow/interface/vector_store/base.py rename to src/backend/base/langflow/interface/vector_store/base.py diff --git a/src/backend/langflow/template/template/__init__.py b/src/backend/base/langflow/interface/wrappers/__init__.py similarity index 100% rename from src/backend/langflow/template/template/__init__.py rename to src/backend/base/langflow/interface/wrappers/__init__.py diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/base/langflow/interface/wrappers/base.py similarity index 51% rename from src/backend/langflow/interface/wrappers/base.py rename to src/backend/base/langflow/interface/wrappers/base.py index 469559ad9..204a5153e 100644 --- a/src/backend/langflow/interface/wrappers/base.py +++ b/src/backend/base/langflow/interface/wrappers/base.py @@ -1,35 +1,23 @@ -from typing import ClassVar, Dict, List, Optional +from typing import Dict, List, Optional -from langchain_community.utilities import requests, sql_database +from langchain_community.utilities import requests from loguru import logger from langflow.interface.base import LangChainTypeCreator -from langflow.utils.util import build_template_from_class, build_template_from_method +from langflow.utils.util import build_template_from_class class WrapperCreator(LangChainTypeCreator): type_name: str = "wrappers" - from_method_nodes: ClassVar[Dict] = {"SQLDatabase": "from_uri"} - @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: - self.type_dict = { - wrapper.__name__: wrapper for wrapper in 
[requests.TextRequestsWrapper, sql_database.SQLDatabase] - } + self.type_dict = {wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]} return self.type_dict def get_signature(self, name: str) -> Optional[Dict]: try: - if name in self.from_method_nodes: - return build_template_from_method( - name, - type_to_cls_dict=self.type_to_loader_dict, - add_function=True, - method_name=self.from_method_nodes[name], - ) - return build_template_from_class(name, self.type_to_loader_dict) except ValueError as exc: raise ValueError("Wrapper not found") from exc diff --git a/src/backend/langflow/utils/__init__.py b/src/backend/base/langflow/legacy_custom/__init__.py similarity index 100% rename from src/backend/langflow/utils/__init__.py rename to src/backend/base/langflow/legacy_custom/__init__.py diff --git a/src/backend/langflow/custom/customs.py b/src/backend/base/langflow/legacy_custom/customs.py similarity index 94% rename from src/backend/langflow/custom/customs.py rename to src/backend/base/langflow/legacy_custom/customs.py index fb22c56b3..ff69064ff 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/base/langflow/legacy_custom/customs.py @@ -1,13 +1,12 @@ from langflow.template import frontend_node # These should always be instantiated -CUSTOM_NODES = { +CUSTOM_NODES: dict[str, dict[str, frontend_node.base.FrontendNode]] = { # "prompts": { # "ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(), # }, "tools": { "PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(), - "PythonFunction": frontend_node.tools.PythonFunctionNode(), "Tool": frontend_node.tools.ToolNode(), }, "agents": { diff --git a/src/backend/base/langflow/load.py b/src/backend/base/langflow/load.py new file mode 100644 index 000000000..1262ac4b9 --- /dev/null +++ b/src/backend/base/langflow/load.py @@ -0,0 +1 @@ +from langflow.processing.load import load_flow_from_json, run_flow_from_json # noqa: F401 diff --git a/src/backend/langflow/main.py 
b/src/backend/base/langflow/main.py similarity index 74% rename from src/backend/langflow/main.py rename to src/backend/base/langflow/main.py index 0eba10ab1..eff774f57 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/base/langflow/main.py @@ -3,32 +3,42 @@ from pathlib import Path from typing import Optional from urllib.parse import urlencode +import nest_asyncio # type: ignore +import socketio # type: ignore from fastapi import FastAPI, Request from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles +from loguru import logger from langflow.api import router +from langflow.initial_setup.setup import create_or_update_starter_projects from langflow.interface.utils import setup_llm_caching from langflow.services.plugins.langfuse_plugin import LangfuseInstance from langflow.services.utils import initialize_services, teardown_services from langflow.utils.logger import configure -@asynccontextmanager -async def lifespan(app: FastAPI): - initialize_services() - setup_llm_caching() - LangfuseInstance.update() - yield - teardown_services() +def get_lifespan(fix_migration=False, socketio_server=None): + @asynccontextmanager + async def lifespan(app: FastAPI): + nest_asyncio.apply() + initialize_services(fix_migration=fix_migration, socketio_server=socketio_server) + setup_llm_caching() + LangfuseInstance.update() + create_or_update_starter_projects() + yield + teardown_services() + + return lifespan def create_app(): """Create the FastAPI app and include the router.""" configure() - + socketio_server = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True) + lifespan = get_lifespan(socketio_server=socketio_server) app = FastAPI(lifespan=lifespan) origins = ["*"] @@ -56,6 +66,13 @@ def create_app(): app.include_router(router) + app = mount_socketio(app, socketio_server) + + return app + + +def mount_socketio(app: FastAPI, socketio_server: 
socketio.AsyncServer): + app.mount("/sio", socketio.ASGIApp(socketio_server, socketio_path="")) return app @@ -90,6 +107,7 @@ def get_static_files_dir(): def setup_app(static_files_dir: Optional[Path] = None, backend_only: bool = False) -> FastAPI: """Setup the FastAPI app.""" # get the directory of the current file + logger.info(f"Setting up app with static files directory {static_files_dir}") if not static_files_dir: static_files_dir = get_static_files_dir() @@ -112,6 +130,7 @@ if __name__ == "__main__": host="127.0.0.1", port=7860, workers=get_number_of_workers(), - log_level="debug", + log_level="error", reload=True, + loop="asyncio", ) diff --git a/src/backend/base/langflow/memory.py b/src/backend/base/langflow/memory.py new file mode 100644 index 000000000..aea9b3000 --- /dev/null +++ b/src/backend/base/langflow/memory.py @@ -0,0 +1,100 @@ +from typing import Optional, Union + +from loguru import logger + +from langflow.schema import Record +from langflow.services.deps import get_monitor_service +from langflow.services.monitor.schema import MessageModel + + +def get_messages( + sender: Optional[str] = None, + sender_name: Optional[str] = None, + session_id: Optional[str] = None, + order_by: Optional[str] = "timestamp", + order: Optional[str] = "DESC", + limit: Optional[int] = None, +): + """ + Retrieves messages from the monitor service based on the provided filters. + + Args: + sender (Optional[str]): The sender of the messages (e.g., "Machine" or "User") + sender_name (Optional[str]): The name of the sender. + session_id (Optional[str]): The session ID associated with the messages. + order_by (Optional[str]): The field to order the messages by. Defaults to "timestamp". + limit (Optional[int]): The maximum number of messages to retrieve. + + Returns: + List[Record]: A list of Record objects representing the retrieved messages. 
+ """ + monitor_service = get_monitor_service() + messages_df = monitor_service.get_messages( + sender=sender, + sender_name=sender_name, + session_id=session_id, + order_by=order_by, + limit=limit, + order=order, + ) + + records: list[Record] = [] + # messages_df has a timestamp + # it gets the last 5 messages, for example + # but now they are ordered from most recent to least recent + # so we need to reverse the order + messages_df = messages_df[::-1] if order == "DESC" else messages_df + for row in messages_df.itertuples(): + record = Record( + data={ + "text": row.message, + "sender": row.sender, + "sender_name": row.sender_name, + "session_id": row.session_id, + }, + ) + records.append(record) + + return records + + +def add_messages(records: Union[list[Record], Record]): + """ + Add a message to the monitor service. + """ + try: + monitor_service = get_monitor_service() + + if isinstance(records, Record): + records = [records] + + if not all(isinstance(record, (Record, str)) for record in records): + types = ", ".join([str(type(record)) for record in records]) + raise ValueError(f"The records must be instances of Record. Found: {types}") + + messages: list[MessageModel] = [] + for record in records: + messages.append(MessageModel.from_record(record)) + + for message in messages: + try: + monitor_service.add_message(message) + except Exception as e: + logger.error(f"Error adding message to monitor service: {e}") + logger.exception(e) + raise e + return records + except Exception as e: + logger.exception(e) + raise e + + +def delete_messages(session_id: str): + """ + Delete messages from the monitor service based on the provided session ID. + + Args: + session_id (str): The session ID associated with the messages to delete. 
+ """ + monitor_service = get_monitor_service() + monitor_service.delete_messages(session_id) diff --git a/src/backend/base/langflow/processing/__init__.py b/src/backend/base/langflow/processing/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/processing/base.py b/src/backend/base/langflow/processing/base.py similarity index 90% rename from src/backend/langflow/processing/base.py rename to src/backend/base/langflow/processing/base.py index 41e03e7bc..e11af0a44 100644 --- a/src/backend/langflow/processing/base.py +++ b/src/backend/base/langflow/processing/base.py @@ -4,7 +4,6 @@ from langchain.agents.agent import AgentExecutor from langchain.callbacks.base import BaseCallbackHandler from loguru import logger -from langflow.api.v1.callback import AsyncStreamingLLMCallbackHandler, StreamingLLMCallbackHandler from langflow.processing.process import fix_memory_inputs, format_actions from langflow.services.deps import get_plugins_service @@ -15,11 +14,6 @@ if TYPE_CHECKING: def setup_callbacks(sync, trace_id, **kwargs): """Setup callbacks for langchain object""" callbacks = [] - if sync: - callbacks.append(StreamingLLMCallbackHandler(**kwargs)) - else: - callbacks.append(AsyncStreamingLLMCallbackHandler(**kwargs)) - plugin_service = get_plugins_service() plugin_callbacks = plugin_service.get_callbacks(_id=trace_id) if plugin_callbacks: @@ -70,14 +64,13 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa except Exception as exc: logger.error(f"Error fixing memory inputs: {exc}") + trace_id = kwargs.pop("session_id", None) try: - trace_id = kwargs.pop("session_id", None) callbacks = setup_callbacks(sync=False, trace_id=trace_id, **kwargs) output = await langchain_object.acall(inputs, callbacks=callbacks) except Exception as exc: # make the error message more informative logger.debug(f"Error: {str(exc)}") - trace_id = kwargs.pop("session_id", None) callbacks = setup_callbacks(sync=True, 
trace_id=trace_id, **kwargs) output = langchain_object(inputs, callbacks=callbacks) diff --git a/src/backend/base/langflow/processing/load.py b/src/backend/base/langflow/processing/load.py new file mode 100644 index 000000000..43d9ecc78 --- /dev/null +++ b/src/backend/base/langflow/processing/load.py @@ -0,0 +1,67 @@ +import json +from pathlib import Path +from typing import List, Optional, Union + +from langflow.graph import Graph +from langflow.graph.schema import RunOutputs +from langflow.processing.process import process_tweaks, run_graph + + +def load_flow_from_json(flow: Union[Path, str, dict], tweaks: Optional[dict] = None) -> Graph: + """ + Load flow from a JSON file or a JSON object. + + :param flow: JSON file path or JSON object + :param tweaks: Optional tweaks to be processed + :param build: If True, build the graph, otherwise return the graph object + :return: Langchain object or Graph object depending on the build parameter + """ + # If input is a file path, load JSON from the file + if isinstance(flow, (str, Path)): + with open(flow, "r", encoding="utf-8") as f: + flow_graph = json.load(f) + # If input is a dictionary, assume it's a JSON object + elif isinstance(flow, dict): + flow_graph = flow + else: + raise TypeError("Input must be either a file path (str) or a JSON object (dict)") + + graph_data = flow_graph["data"] + if tweaks is not None: + graph_data = process_tweaks(graph_data, tweaks) + + graph = Graph.from_payload(graph_data) + return graph + + +def run_flow_from_json( + flow: Union[Path, str, dict], + input_value: str, + tweaks: Optional[dict] = None, + input_type: str = "chat", + output_type: str = "chat", + output_component: Optional[str] = None, +) -> List[RunOutputs]: + """ + Runs a JSON flow by loading it from a file or dictionary and executing it with the given input value. + + Args: + flow (Union[Path, str, dict]): The path to the JSON file, or the JSON dictionary representing the flow. 
+ input_value (str): The input value to be processed by the flow. + tweaks (Optional[dict], optional): Optional tweaks to be applied to the flow. Defaults to None. + input_type (str, optional): The type of the input value. Defaults to "chat". + output_type (str, optional): The type of the output value. Defaults to "chat". + output_component (Optional[str], optional): The specific output component to retrieve. Defaults to None. + + Returns: + None: The result of running the flow. + """ + graph = load_flow_from_json(flow, tweaks) + result = run_graph( + graph=graph, + input_value=input_value, + input_type=input_type, + output_type=output_type, + output_component=output_component, + ) + return result diff --git a/src/backend/base/langflow/processing/process.py b/src/backend/base/langflow/processing/process.py new file mode 100644 index 000000000..c05163933 --- /dev/null +++ b/src/backend/base/langflow/processing/process.py @@ -0,0 +1,302 @@ +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +from langchain.agents import AgentExecutor +from langchain.schema import AgentAction +from loguru import logger +from pydantic import BaseModel + +from langflow.graph.graph.base import Graph +from langflow.graph.schema import RunOutputs +from langflow.graph.vertex.base import Vertex +from langflow.interface.run import get_memory_key, update_memory_keys +from langflow.schema.schema import INPUT_FIELD_NAME +from langflow.services.session.service import SessionService + +if TYPE_CHECKING: + from langflow.api.v1.schemas import InputValueRequest, Tweaks + + +def fix_memory_inputs(langchain_object): + """ + Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the + object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the + get_memory_key function and updates the memory keys using the update_memory_keys function. 
+ """ + if not hasattr(langchain_object, "memory") or langchain_object.memory is None: + return + try: + if ( + hasattr(langchain_object.memory, "memory_key") + and langchain_object.memory.memory_key in langchain_object.input_variables + ): + return + except AttributeError: + input_variables = ( + langchain_object.prompt.input_variables + if hasattr(langchain_object, "prompt") + else langchain_object.input_keys + ) + if langchain_object.memory.memory_key in input_variables: + return + + possible_new_mem_key = get_memory_key(langchain_object) + if possible_new_mem_key is not None: + update_memory_keys(langchain_object, possible_new_mem_key) + + +def format_actions(actions: List[Tuple[AgentAction, str]]) -> str: + """Format a list of (AgentAction, answer) tuples into a string.""" + output = [] + for action, answer in actions: + log = action.log + tool = action.tool + tool_input = action.tool_input + output.append(f"Log: {log}") + if "Action" not in log and "Action Input" not in log: + output.append(f"Tool: {tool}") + output.append(f"Tool Input: {tool_input}") + output.append(f"Answer: {answer}") + output.append("") # Add a blank line + return "\n".join(output) + + +def get_result_and_thought(langchain_object: Any, inputs: dict): + """Get result and thought from extracted json""" + try: + if hasattr(langchain_object, "verbose"): + langchain_object.verbose = True + + if hasattr(langchain_object, "return_intermediate_steps"): + langchain_object.return_intermediate_steps = False + + try: + if not isinstance(langchain_object, AgentExecutor): + fix_memory_inputs(langchain_object) + except Exception as exc: + logger.error(f"Error fixing memory inputs: {exc}") + + try: + output = langchain_object(inputs, return_only_outputs=True) + except ValueError as exc: + # make the error message more informative + logger.debug(f"Error: {str(exc)}") + output = langchain_object.run(inputs) + + except Exception as exc: + raise ValueError(f"Error: {str(exc)}") from exc + return output + + 
def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]:
    """Return the single input value when `inputs` has exactly one entry, else None."""
    if len(inputs) != 1:
        return None
    return next(iter(inputs.values()))


def process_inputs(
    inputs: Optional[Union[dict, List[dict]]] = None,
    artifacts: Optional[Dict[str, Any]] = None,
) -> Union[dict, List[dict]]:
    """Backfill one inputs dict (or a list of them) with artifact values.

    Args:
        inputs: A single inputs dict, a list of them, or None (treated as {}).
        artifacts: Values used to fill in missing or falsy input keys.

    Returns:
        The updated inputs, same shape as given.
    """
    if inputs is None:
        inputs = {}
    if artifacts is None:
        artifacts = {}
    if isinstance(inputs, dict):
        inputs = update_inputs_dict(inputs, artifacts)
    elif isinstance(inputs, List):
        inputs = [update_inputs_dict(entry, artifacts) for entry in inputs]
    return inputs


def update_inputs_dict(inputs: dict, artifacts: Dict[str, Any]) -> dict:
    """Copy artifact values into `inputs` for keys that are missing or falsy.

    The special "repr" artifact is never copied.
    """
    for key, value in artifacts.items():
        if key == "repr":
            continue
        if not inputs.get(key):  # key absent, or present with an empty/None value
            inputs[key] = value
    return inputs


class Result(BaseModel):
    """A run result paired with the session id it was produced in."""

    result: Any
    session_id: str


async def run_graph_internal(
    graph: "Graph",
    flow_id: str,
    stream: bool = False,
    session_id: Optional[str] = None,
    inputs: Optional[List["InputValueRequest"]] = None,
    outputs: Optional[List[str]] = None,
    artifacts: Optional[Dict[str, Any]] = None,
    session_service: Optional[SessionService] = None,
) -> tuple[List[RunOutputs], str]:
    """Run `graph` asynchronously and persist the session afterwards.

    Args:
        graph: The graph to execute.
        flow_id: Id of the flow, used to derive a session key when needed.
        stream: Whether to stream results.
        session_id: Explicit session id; generated from `flow_id` and the
            graph data when None (requires `session_service`).
        inputs: Input requests; None input values are coerced to "".
        outputs: Ids of output vertices to collect.
        artifacts: Stored alongside the graph in the session.
        session_service: Generates the session key and stores the session.

    Returns:
        A (run outputs, session id) tuple.

    Raises:
        ValueError: If neither `session_id` nor `session_service` is given.
    """
    inputs = inputs or []
    graph_data = graph._graph_data
    if session_id is not None:
        session_id_str = session_id
    elif session_service is not None:
        session_id_str = session_service.generate_key(session_id=flow_id, data_graph=graph_data)
    else:
        raise ValueError("session_id or session_service must be provided")

    components = []
    inputs_list = []
    types = []
    for input_value_request in inputs:
        if input_value_request.input_value is None:
            logger.warning("InputValueRequest input_value cannot be None, defaulting to an empty string.")
            input_value_request.input_value = ""
        components.append(input_value_request.components or [])
        inputs_list.append({INPUT_FIELD_NAME: input_value_request.input_value})
        types.append(input_value_request.type)

    run_outputs = await graph.arun(
        inputs_list,
        components,
        types,
        outputs or [],
        stream=stream,
        session_id=session_id_str or "",
    )
    if session_id_str and session_service:
        await session_service.update_session(session_id_str, (graph, artifacts))
    return run_outputs, session_id_str


def run_graph(
    graph: "Graph",
    input_value: str,
    input_type: str,
    output_type: str,
    output_component: Optional[str] = None,
) -> List[RunOutputs]:
    """Run `graph` synchronously with a single input value.

    Args:
        graph: The graph to execute.
        input_value: Value fed to the graph's input component(s).
        input_type: Type of the input (e.g. "chat", "text").
        output_type: Which outputs to collect: "debug" selects every vertex,
            "any" every output vertex, anything else selects output vertices
            whose id contains the type.
        output_component: When given, only this component's output is collected.

    Returns:
        List[RunOutputs]: The outputs produced by the graph.
    """
    # BUG FIX: InputValueRequest is imported at module level only under
    # TYPE_CHECKING, so it is unavailable at runtime; import it locally.
    from langflow.api.v1.schemas import InputValueRequest

    inputs = [InputValueRequest(components=[], input_value=input_value, type=input_type)]
    if output_component:
        outputs = [output_component]
    else:
        outputs = [
            vertex.id
            for vertex in graph.vertices
            if output_type == "debug"
            or (vertex.is_output and (output_type == "any" or output_type in vertex.id.lower()))
        ]
    components = []
    inputs_list = []
    types = []
    for input_value_request in inputs:
        if input_value_request.input_value is None:
            logger.warning("InputValueRequest input_value cannot be None, defaulting to an empty string.")
            input_value_request.input_value = ""
        components.append(input_value_request.components or [])
        inputs_list.append({INPUT_FIELD_NAME: input_value_request.input_value})
        types.append(input_value_request.type)
    return graph.run(
        inputs_list,
        components,
        types,
        outputs or [],
        stream=False,
        session_id="",
    )


def validate_input(
    graph_data: Dict[str, Any], tweaks: Union["Tweaks", Dict[str, Dict[str, Any]]]
) -> List[Dict[str, Any]]:
    """Validate the shapes of `graph_data` and `tweaks` and return the node list.

    Raises:
        ValueError: If either argument is not a dict, or no node list is found.
    """
    if not isinstance(graph_data, dict) or not isinstance(tweaks, dict):
        raise ValueError("graph_data and tweaks should be dictionaries")
    nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes")
    if not isinstance(nodes, list):
        raise ValueError("graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key")
    return nodes


def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None:
    """Write tweak values into a node's template, logging unknown tweak names."""
    template_data = node.get("data", {}).get("node", {}).get("template")
    if not isinstance(template_data, dict):
        logger.warning(f"Template data for node {node.get('id')} should be a dictionary")
        return
    for tweak_name, tweak_value in node_tweaks.items():
        # FIX: the original re-checked `tweak_name in template_data` right
        # after this guard; the second check was always true and is removed.
        if tweak_name not in template_data:
            logger.warning(f"Node {node.get('id')} does not have a tweak named {tweak_name}")
            continue
        # "file_path" tweaks overwrite that field itself; all others set "value".
        key = tweak_name if tweak_name == "file_path" else "value"
        template_data[tweak_name][key] = tweak_value


def apply_tweaks_on_vertex(vertex: Vertex, node_tweaks: Dict[str, Any]) -> None:
    """Overwrite a vertex's params with truthy tweak values for matching names."""
    for tweak_name, tweak_value in node_tweaks.items():
        if tweak_name and tweak_value and tweak_name in vertex.params:
            vertex.params[tweak_name] = tweak_value


def process_tweaks(graph_data: Dict[str, Any], tweaks: Union["Tweaks", Dict[str, Dict[str, Any]]]) -> Dict[str, Any]:
    """Apply tweaks to `graph_data` in place and return it.

    Dict-valued tweak entries are matched against node ids first, then node
    display names; non-dict entries are treated as global tweaks applied to
    every node.

    :param graph_data: Dict with nodes under 'data' or directly under 'nodes'.
    :param tweaks: Mapping of node id / display name / tweak name to values.
    :return: The modified graph_data dictionary.
    :raises ValueError: If the input is not in the expected format.
    """
    if not isinstance(tweaks, dict):
        tweaks = tweaks.model_dump()
    nodes = validate_input(graph_data, tweaks)
    nodes_map = {node.get("id"): node for node in nodes}
    nodes_display_name_map = {node.get("data", {}).get("node", {}).get("display_name"): node for node in nodes}

    all_nodes_tweaks = {}
    for key, value in tweaks.items():
        if isinstance(value, dict):
            if node := (nodes_map.get(key) or nodes_display_name_map.get(key)):
                apply_tweaks(node, value)
        else:
            all_nodes_tweaks[key] = value
    if all_nodes_tweaks:
        for node in nodes:
            apply_tweaks(node, all_nodes_tweaks)
    return graph_data


def process_tweaks_on_graph(graph: Graph, tweaks: Dict[str, Dict[str, Any]]):
    """Apply per-node tweaks directly to the vertices of an instantiated graph."""
    for vertex in graph.vertices:
        if isinstance(vertex, Vertex) and isinstance(vertex.id, str):
            if node_tweaks := tweaks.get(vertex.id):
                apply_tweaks_on_vertex(vertex, node_tweaks)
        else:
            logger.warning("Each node should be a Vertex with an 'id' attribute of type str")
    return graph
+ + Note: + - Only keys that are valid attribute names (e.g., strings that could be variable names) are accessible via dot notation. + - Keys which are not valid Python attribute names or collide with the dict method names (like 'items', 'keys') + should be accessed using the traditional dict['key'] notation. + """ + + def __getattr__(self, attr): + """ + Override dot access to behave like dictionary lookup. Automatically convert nested dicts to dotdicts. + + Args: + attr (str): Attribute to access. + + Returns: + The value associated with 'attr' in the dictionary, converted to dotdict if it is a dict. + + Raises: + AttributeError: If the attribute is not found in the dictionary. + """ + try: + value = self[attr] + if isinstance(value, dict) and not isinstance(value, dotdict): + value = dotdict(value) + self[attr] = value # Update self to nest dotdict for future accesses + return value + except KeyError: + raise AttributeError(f"'dotdict' object has no attribute '{attr}'") + + def __setattr__(self, key, value): + """ + Override attribute setting to work as dictionary item assignment. + + Args: + key (str): The key under which to store the value. + value: The value to store in the dictionary. + """ + if isinstance(value, dict) and not isinstance(value, dotdict): + value = dotdict(value) + self[key] = value + + def __delattr__(self, key): + """ + Override attribute deletion to work as dictionary item deletion. + + Args: + key (str): The key of the item to delete from the dictionary. + + Raises: + AttributeError: If the key is not found in the dictionary. + """ + try: + del self[key] + except KeyError: + raise AttributeError(f"'dotdict' object has no attribute '{key}'") + + def __missing__(self, key): + """ + Handle missing keys by returning an empty dotdict. This allows chaining access without raising KeyError. + + Args: + key: The missing key. + + Returns: + An empty dotdict instance for the given missing key. 
+ """ + return dotdict() diff --git a/src/backend/base/langflow/schema/schema.py b/src/backend/base/langflow/schema/schema.py new file mode 100644 index 000000000..a48e72339 --- /dev/null +++ b/src/backend/base/langflow/schema/schema.py @@ -0,0 +1,150 @@ +import copy +from typing import Literal, Optional + +from langchain_core.documents import Document +from pydantic import BaseModel, model_validator + + +class Record(BaseModel): + """ + Represents a record with text and optional data. + + Attributes: + data (dict, optional): Additional data associated with the record. + """ + + text_key: str = "text" + data: dict = {} + default_value: Optional[str] = "" + + @model_validator(mode="before") + def validate_data(cls, values): + if not values.get("data"): + values["data"] = {} + # Any other keyword should be added to the data dictionary + for key in values: + if key not in values["data"] and key not in {"text_key", "data", "default_value"}: + values["data"][key] = values[key] + return values + + def get_text(self): + """ + Retrieves the text value from the data dictionary. + + If the text key is present in the data dictionary, the corresponding value is returned. + Otherwise, the default value is returned. + + Returns: + The text value from the data dictionary or the default value. + """ + return self.data.get(self.text_key, self.default_value) + + @classmethod + def from_document(cls, document: Document) -> "Record": + """ + Converts a Document to a Record. + + Args: + document (Document): The Document to convert. + + Returns: + Record: The converted Record. + """ + data = document.metadata + data["text"] = document.page_content + return cls(data=data, text_key="text") + + def __add__(self, other: "Record") -> "Record": + """ + Combines the data of two records by attempting to add values for overlapping keys + for all types that support the addition operation. Falls back to the value from 'other' + record when addition is not supported. 
+ """ + combined_data = self.data.copy() + for key, value in other.data.items(): + # If the key exists in both records and both values support the addition operation + if key in combined_data: + try: + combined_data[key] += value + except TypeError: + # Fallback: Use the value from 'other' record if addition is not supported + combined_data[key] = value + else: + # If the key is not in the first record, simply add it + combined_data[key] = value + + return Record(data=combined_data) + + def to_lc_document(self) -> Document: + """ + Converts the Record to a Document. + + Returns: + Document: The converted Document. + """ + text = self.data.pop(self.text_key, self.default_value) + return Document(page_content=text, metadata=self.data) + + def __getattr__(self, key): + """ + Allows attribute-like access to the data dictionary. + """ + try: + if key.startswith("__"): + return self.__getattribute__(key) + if key in {"data", "text_key"} or key.startswith("_"): + return super().__getattr__(key) + + return self.data.get(key, self.default_value) + except KeyError: + # Fallback to default behavior to raise AttributeError for undefined attributes + raise AttributeError(f"'{type(self).__name__}' object has no attribute '{key}'") + + def __setattr__(self, key, value): + """ + Allows attribute-like setting of values in the data dictionary, + while still allowing direct assignment to class attributes. + """ + if key in {"data", "text_key"} or key.startswith("_"): + super().__setattr__(key, value) + else: + self.data[key] = value + + def __delattr__(self, key): + """ + Allows attribute-like deletion from the data dictionary. + """ + if key in {"data", "text_key"} or key.startswith("_"): + super().__delattr__(key) + else: + del self.data[key] + + def __deepcopy__(self, memo): + """ + Custom deepcopy implementation to handle copying of the Record object. 
+ """ + # Create a new Record object with a deep copy of the data dictionary + return Record(data=copy.deepcopy(self.data, memo), text_key=self.text_key, default_value=self.default_value) + + def __str__(self) -> str: + """ + Returns a string representation of the Record, including text and data. + """ + # Assuming a method to dump model data as JSON string exists. + # If it doesn't, you might need to implement it or use json.dumps() directly. + # build the string considering all keys in the data dictionary + prefix = "Record(" + suffix = ")" + text = f"text_key={self.text_key}, " + text += ", ".join([f"{k}={v}" for k, v in self.data.items()]) + return prefix + text + suffix + + # check which attributes the Record has by checking the keys in the data dictionary + def __dir__(self): + return super().__dir__() + list(self.data.keys()) + + +INPUT_FIELD_NAME = "input_value" + +InputType = Literal["chat", "text", "any"] +OutputType = Literal["chat", "text", "any", "debug"] diff --git a/src/backend/langflow/server.py b/src/backend/base/langflow/server.py similarity index 63% rename from src/backend/langflow/server.py rename to src/backend/base/langflow/server.py index 9fe432744..b7e86934d 100644 --- a/src/backend/langflow/server.py +++ b/src/backend/base/langflow/server.py @@ -1,11 +1,19 @@ +import os + from gunicorn.app.base import BaseApplication # type: ignore +from uvicorn.workers import UvicornWorker + + +class LangflowUvicornWorker(UvicornWorker): + CONFIG_KWARGS = {"loop": "asyncio"} class LangflowApplication(BaseApplication): def __init__(self, app, options=None): self.options = options or {} - self.options["worker_class"] = "uvicorn.workers.UvicornWorker" + self.options["worker_class"] = "langflow.server.LangflowUvicornWorker" + self.options["loglevel"] = os.getenv("LANGFLOW_LOG_LEVEL", "error").lower() self.application = app super().__init__() diff --git a/src/backend/langflow/services/__init__.py b/src/backend/base/langflow/services/__init__.py similarity 
class Service(ABC):
    """Base class for langflow services.

    A service has a `name`, a readiness flag, and can describe its public
    methods via `get_schema`.
    """

    name: str
    ready: bool = False

    def get_schema(self):
        """Build a dict describing each public method: its name, parameter
        annotations, return annotation and docstring.

        Plain data attributes (e.g. `name`, `ready`) are skipped.
        """
        schema = {}
        ignore = ["teardown", "set_ready"]
        for attr_name in dir(self):
            if attr_name.startswith("_") or attr_name in ignore:
                continue
            member = getattr(self, attr_name)
            # BUG FIX: the original read __annotations__ on every public
            # attribute, which raises AttributeError for non-callables such
            # as the `name` string or the `ready` bool.
            if not callable(member):
                continue
            annotations = getattr(member, "__annotations__", {})
            schema[attr_name] = {
                "name": attr_name,
                "parameters": annotations,
                "return": annotations.get("return"),
                "documentation": member.__doc__,
            }
        return schema

    def teardown(self):
        """Release resources; the default is a no-op for services with none."""
        pass

    def set_ready(self):
        """Mark the service as ready."""
        self.ready = True
class CacheService(Service):
    """Abstract interface for a synchronous cache.

    Implementations accept an optional threading.Lock so callers can supply
    their own lock instead of the cache's internal one.
    """

    name = "cache_service"

    @abc.abstractmethod
    def get(self, key, lock: Optional[threading.Lock] = None):
        """Return the value stored under `key`, or None when absent."""

    @abc.abstractmethod
    def set(self, key, value, lock: Optional[threading.Lock] = None):
        """Store `value` under `key`."""

    @abc.abstractmethod
    def upsert(self, key, value, lock: Optional[threading.Lock] = None):
        """Insert `value` under `key`, or update the existing entry."""

    @abc.abstractmethod
    def delete(self, key, lock: Optional[threading.Lock] = None):
        """Remove the entry stored under `key`."""

    @abc.abstractmethod
    def clear(self, lock: Optional[threading.Lock] = None):
        """Remove every entry from the cache."""

    @abc.abstractmethod
    def __contains__(self, key):
        """Return True when `key` is present in the cache."""

    @abc.abstractmethod
    def __getitem__(self, key):
        """Return the value under `key` using subscript notation."""

    @abc.abstractmethod
    def __setitem__(self, key, value):
        """Store `value` under `key` using subscript notation."""

    @abc.abstractmethod
    def __delitem__(self, key):
        """Remove the entry under `key` using subscript notation."""


class AsyncBaseCacheService(Service):
    """Abstract interface for an async cache.

    Mutating operations are coroutines and take an optional asyncio.Lock.
    """

    name = "cache_service"

    @abc.abstractmethod
    async def get(self, key, lock: Optional[asyncio.Lock] = None):
        """Return the value stored under `key`, or None when absent."""

    @abc.abstractmethod
    async def set(self, key, value, lock: Optional[asyncio.Lock] = None):
        """Store `value` under `key`."""

    @abc.abstractmethod
    async def upsert(self, key, value, lock: Optional[asyncio.Lock] = None):
        """Insert `value` under `key`, or update the existing entry."""

    @abc.abstractmethod
    async def delete(self, key, lock: Optional[asyncio.Lock] = None):
        """Remove the entry stored under `key`."""

    @abc.abstractmethod
    async def clear(self, lock: Optional[asyncio.Lock] = None):
        """Remove every entry from the cache."""

    @abc.abstractmethod
    def __contains__(self, key):
        """Return True when `key` is present in the cache."""
import OrderedDict +from typing import Optional from loguru import logger from langflow.services.base import Service -from langflow.services.cache.base import BaseCacheService +from langflow.services.cache.base import AsyncBaseCacheService, CacheService -class InMemoryCache(BaseCacheService, Service): - +class ThreadingInMemoryCache(CacheService, Service): """ A simple in-memory cache using an OrderedDict. @@ -49,7 +50,7 @@ class InMemoryCache(BaseCacheService, Service): self.max_size = max_size self.expiration_time = expiration_time - def get(self, key): + def get(self, key, lock: Optional[threading.Lock] = None): """ Retrieve an item from the cache. @@ -59,7 +60,7 @@ class InMemoryCache(BaseCacheService, Service): Returns: The value associated with the key, or None if the key is not found or the item has expired. """ - with self._lock: + with lock or self._lock: return self._get_without_lock(key) def _get_without_lock(self, key): @@ -80,7 +81,7 @@ class InMemoryCache(BaseCacheService, Service): self.delete(key) return None - def set(self, key, value, pickle=False): + def set(self, key, value, lock: Optional[threading.Lock] = None): """ Add an item to the cache. @@ -90,7 +91,7 @@ class InMemoryCache(BaseCacheService, Service): key: The key of the item. value: The value to cache. """ - with self._lock: + with lock or self._lock: if key in self._cache: # Remove existing key before re-inserting to update order self.delete(key) @@ -98,12 +99,10 @@ class InMemoryCache(BaseCacheService, Service): # Remove least recently used item self._cache.popitem(last=False) # pickle locally to mimic Redis - if pickle: - value = pickle.dumps(value) self._cache[key] = {"value": value, "time": time.time()} - def upsert(self, key, value): + def upsert(self, key, value, lock: Optional[threading.Lock] = None): """ Inserts or updates a value in the cache. If the existing value and the new value are both dictionaries, they are merged. 
@@ -112,7 +111,7 @@ class InMemoryCache(BaseCacheService, Service): key: The key of the item. value: The value to insert or update. """ - with self._lock: + with lock or self._lock: existing_value = self._get_without_lock(key) if existing_value is not None and isinstance(existing_value, dict) and isinstance(value, dict): existing_value.update(value) @@ -120,7 +119,7 @@ class InMemoryCache(BaseCacheService, Service): self.set(key, value) - def get_or_set(self, key, value): + def get_or_set(self, key, value, lock: Optional[threading.Lock] = None): """ Retrieve an item from the cache. If the item does not exist, set it with the provided value. @@ -132,27 +131,27 @@ class InMemoryCache(BaseCacheService, Service): Returns: The cached value associated with the key. """ - with self._lock: + with lock or self._lock: if key in self._cache: return self.get(key) self.set(key, value) return value - def delete(self, key): + def delete(self, key, lock: Optional[threading.Lock] = None): """ Remove an item from the cache. Args: key: The key of the item to remove. """ - with self._lock: + with lock or self._lock: self._cache.pop(key, None) - def clear(self): + def clear(self, lock: Optional[threading.Lock] = None): """ Clear all items from the cache. """ - with self._lock: + with lock or self._lock: self._cache.clear() def __contains__(self, key): @@ -180,7 +179,7 @@ class InMemoryCache(BaseCacheService, Service): return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" -class RedisCache(BaseCacheService, Service): +class RedisCache(CacheService): """ A Redis-based cache implementation. 
@@ -323,3 +322,85 @@ class RedisCache(BaseCacheService, Service): def __repr__(self): """Return a string representation of the RedisCache instance.""" return f"RedisCache(expiration_time={self.expiration_time})" + + +class AsyncInMemoryCache(AsyncBaseCacheService, Service): + def __init__(self, max_size=None, expiration_time=3600): + self.cache = OrderedDict() + + self.lock = asyncio.Lock() + self.max_size = max_size + self.expiration_time = expiration_time + + async def get(self, key, lock: Optional[asyncio.Lock] = None): + if not lock: + async with self.lock: + return await self._get(key) + else: + return await self._get(key) + + async def _get(self, key): + item = self.cache.get(key, None) + if item and (time.time() - item["time"] < self.expiration_time): + self.cache.move_to_end(key) + return pickle.loads(item["value"]) if isinstance(item["value"], bytes) else item["value"] + if item: + await self.delete(key) + return None + + async def set(self, key, value, lock: Optional[asyncio.Lock] = None): + if not lock: + async with self.lock: + await self._set( + key, + value, + ) + else: + await self._set( + key, + value, + ) + + async def _set(self, key, value): + if self.max_size and len(self.cache) >= self.max_size: + self.cache.popitem(last=False) + self.cache[key] = {"value": value, "time": time.time()} + self.cache.move_to_end(key) + + async def delete(self, key, lock: Optional[asyncio.Lock] = None): + if not lock: + async with self.lock: + await self._delete(key) + else: + await self._delete(key) + + async def _delete(self, key): + if key in self.cache: + del self.cache[key] + + async def clear(self, lock: Optional[asyncio.Lock] = None): + if not lock: + async with self.lock: + await self._clear() + else: + await self._clear() + + async def _clear(self): + self.cache.clear() + + async def upsert(self, key, value, lock: Optional[asyncio.Lock] = None): + if not lock: + async with self.lock: + await self._upsert(key, value) + else: + await self._upsert(key, value) 
+ + async def _upsert(self, key, value): + existing_value = await self.get(key) + if existing_value is not None and isinstance(existing_value, dict) and isinstance(value, dict): + existing_value.update(value) + value = existing_value + await self.set(key, value) + + def __contains__(self, key): + return key in self.cache diff --git a/src/backend/langflow/services/cache/utils.py b/src/backend/base/langflow/services/cache/utils.py similarity index 100% rename from src/backend/langflow/services/cache/utils.py rename to src/backend/base/langflow/services/cache/utils.py diff --git a/src/backend/base/langflow/services/chat/__init__.py b/src/backend/base/langflow/services/chat/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/chat/cache.py b/src/backend/base/langflow/services/chat/cache.py similarity index 99% rename from src/backend/langflow/services/chat/cache.py rename to src/backend/base/langflow/services/chat/cache.py index f6247b540..959ea877c 100644 --- a/src/backend/langflow/services/chat/cache.py +++ b/src/backend/base/langflow/services/chat/cache.py @@ -1,10 +1,11 @@ from contextlib import contextmanager from typing import Any, Awaitable, Callable, List, Optional -from langflow.services.base import Service import pandas as pd from PIL import Image +from langflow.services.base import Service + class Subject: """Base class for implementing the observer pattern.""" diff --git a/src/backend/langflow/services/chat/config.py b/src/backend/base/langflow/services/chat/config.py similarity index 100% rename from src/backend/langflow/services/chat/config.py rename to src/backend/base/langflow/services/chat/config.py diff --git a/src/backend/langflow/services/chat/factory.py b/src/backend/base/langflow/services/chat/factory.py similarity index 100% rename from src/backend/langflow/services/chat/factory.py rename to src/backend/base/langflow/services/chat/factory.py diff --git 
a/src/backend/base/langflow/services/chat/service.py b/src/backend/base/langflow/services/chat/service.py new file mode 100644 index 000000000..072920418 --- /dev/null +++ b/src/backend/base/langflow/services/chat/service.py @@ -0,0 +1,39 @@ +import asyncio +from collections import defaultdict +from typing import Any, Optional + +from langflow.services.base import Service +from langflow.services.deps import get_cache_service + + +class ChatService(Service): + name = "chat_service" + + def __init__(self): + self._cache_locks = defaultdict(asyncio.Lock) + self.cache_service = get_cache_service() + + async def set_cache(self, flow_id: str, data: Any, lock: Optional[asyncio.Lock] = None) -> bool: + """ + Set the cache for a client. + """ + # client_id is the flow id but that already exists in the cache + # so we need to change it to something else + result_dict = { + "result": data, + "type": type(data), + } + await self.cache_service.upsert(flow_id, result_dict, lock=lock or self._cache_locks[flow_id]) + return flow_id in self.cache_service + + async def get_cache(self, flow_id: str, lock: Optional[asyncio.Lock] = None) -> Any: + """ + Get the cache for a client. + """ + return await self.cache_service.get(flow_id, lock=lock or self._cache_locks[flow_id]) + + async def clear_cache(self, flow_id: str, lock: Optional[asyncio.Lock] = None): + """ + Clear the cache for a client. 
+ """ + await self.cache_service.delete(flow_id, lock=lock or self._cache_locks[flow_id]) diff --git a/src/backend/langflow/services/chat/utils.py b/src/backend/base/langflow/services/chat/utils.py similarity index 99% rename from src/backend/langflow/services/chat/utils.py rename to src/backend/base/langflow/services/chat/utils.py index 0562b9df2..271c0e85b 100644 --- a/src/backend/langflow/services/chat/utils.py +++ b/src/backend/base/langflow/services/chat/utils.py @@ -3,10 +3,11 @@ from typing import Any from langchain.agents import AgentExecutor from langchain.chains.base import Chain from langchain_core.runnables import Runnable +from loguru import logger + from langflow.api.v1.schemas import ChatMessage from langflow.interface.utils import try_setting_streaming_options from langflow.processing.base import get_result_and_steps -from loguru import logger LANGCHAIN_RUNNABLES = (Chain, Runnable, AgentExecutor) diff --git a/src/backend/base/langflow/services/database/__init__.py b/src/backend/base/langflow/services/database/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/database/factory.py b/src/backend/base/langflow/services/database/factory.py similarity index 99% rename from src/backend/langflow/services/database/factory.py rename to src/backend/base/langflow/services/database/factory.py index 57bf1668d..3b03da131 100644 --- a/src/backend/langflow/services/database/factory.py +++ b/src/backend/base/langflow/services/database/factory.py @@ -1,4 +1,6 @@ from typing import TYPE_CHECKING + + from langflow.services.database.service import DatabaseService from langflow.services.factory import ServiceFactory diff --git a/src/backend/base/langflow/services/database/models/__init__.py b/src/backend/base/langflow/services/database/models/__init__.py new file mode 100644 index 000000000..1f19435bd --- /dev/null +++ b/src/backend/base/langflow/services/database/models/__init__.py @@ -0,0 +1,6 @@ +from .api_key import 
ApiKey +from .flow import Flow +from .user import User +from .variable import Variable + +__all__ = ["Flow", "User", "ApiKey", "Variable"] diff --git a/src/backend/langflow/services/database/models/api_key/__init__.py b/src/backend/base/langflow/services/database/models/api_key/__init__.py similarity index 100% rename from src/backend/langflow/services/database/models/api_key/__init__.py rename to src/backend/base/langflow/services/database/models/api_key/__init__.py diff --git a/src/backend/langflow/services/database/models/api_key/crud.py b/src/backend/base/langflow/services/database/models/api_key/crud.py similarity index 100% rename from src/backend/langflow/services/database/models/api_key/crud.py rename to src/backend/base/langflow/services/database/models/api_key/crud.py diff --git a/src/backend/langflow/services/database/models/api_key/model.py b/src/backend/base/langflow/services/database/models/api_key/model.py similarity index 100% rename from src/backend/langflow/services/database/models/api_key/model.py rename to src/backend/base/langflow/services/database/models/api_key/model.py diff --git a/src/backend/langflow/services/database/models/base.py b/src/backend/base/langflow/services/database/models/base.py similarity index 100% rename from src/backend/langflow/services/database/models/base.py rename to src/backend/base/langflow/services/database/models/base.py diff --git a/src/backend/langflow/services/database/models/flow/__init__.py b/src/backend/base/langflow/services/database/models/flow/__init__.py similarity index 100% rename from src/backend/langflow/services/database/models/flow/__init__.py rename to src/backend/base/langflow/services/database/models/flow/__init__.py diff --git a/src/backend/base/langflow/services/database/models/flow/model.py b/src/backend/base/langflow/services/database/models/flow/model.py new file mode 100644 index 000000000..cbfdec563 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/flow/model.py @@ 
-0,0 +1,127 @@ +# Path: src/backend/langflow/services/database/models/flow/model.py + +from datetime import datetime +from typing import TYPE_CHECKING, Dict, Optional +from uuid import UUID, uuid4 + +from emoji import purely_emoji # type: ignore +from pydantic import field_serializer, field_validator +from sqlmodel import JSON, Column, Field, Relationship, SQLModel + +from langflow.interface.custom.attributes import validate_icon +from langflow.schema.schema import Record + +if TYPE_CHECKING: + from langflow.services.database.models.user import User + + +class FlowBase(SQLModel): + name: str = Field(index=True) + description: Optional[str] = Field(index=True, nullable=True, default=None) + icon: Optional[str] = Field(default=None, nullable=True) + icon_bg_color: Optional[str] = Field(default=None, nullable=True) + data: Optional[Dict] = Field(default=None, nullable=True) + is_component: Optional[bool] = Field(default=False, nullable=True) + updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow, nullable=True) + folder: Optional[str] = Field(default=None, nullable=True) + + @field_validator("icon_bg_color") + def validate_icon_bg_color(cls, v): + if v is not None and not isinstance(v, str): + raise ValueError("Icon background color must be a string") + # validate that is is a hex color + if v and not v.startswith("#"): + raise ValueError("Icon background color must start with #") + + # validate that it is a valid hex color + if v and len(v) != 7: + raise ValueError("Icon background color must be 7 characters long") + return v + + @field_validator("icon") + def validate_icon_atr(cls, v): + # const emojiRegex = /\p{Emoji}/u; + # const isEmoji = emojiRegex.test(data?.node?.icon!); + # emoji pattern in Python + if v is None: + return v + + emoji = validate_icon(v) + + if purely_emoji(emoji): + # this is indeed an emoji + return emoji + # otherwise it should be a valid lucide icon + if v is not None and not isinstance(v, str): + raise ValueError("Icon 
must be a string") + # is should be lowercase and contain only letters and hyphens + if v and not v.islower(): + raise ValueError("Icon must be lowercase") + if v and not v.replace("-", "").isalpha(): + raise ValueError("Icon must contain only letters and hyphens") + return v + + @field_validator("data") + def validate_json(v): + if not v: + return v + if not isinstance(v, dict): + raise ValueError("Flow must be a valid JSON") + + # data must contain nodes and edges + if "nodes" not in v.keys(): + raise ValueError("Flow must have nodes") + if "edges" not in v.keys(): + raise ValueError("Flow must have edges") + + return v + + # updated_at can be serialized to JSON + @field_serializer("updated_at") + def serialize_dt(self, dt: datetime, _info): + if dt is None: + return None + return dt.isoformat() + + @field_validator("updated_at", mode="before") + def validate_dt(cls, v): + if v is None: + return v + elif isinstance(v, datetime): + return v + + return datetime.fromisoformat(v) + + +class Flow(FlowBase, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) + user_id: Optional[UUID] = Field(index=True, foreign_key="user.id", nullable=True) + user: "User" = Relationship(back_populates="flows") + + def to_record(self): + serialized = self.model_dump() + data = { + "id": serialized.pop("id"), + "data": serialized.pop("data"), + "name": serialized.pop("name"), + "description": serialized.pop("description"), + "updated_at": serialized.pop("updated_at"), + } + record = Record(data=data) + return record + + +class FlowCreate(FlowBase): + user_id: Optional[UUID] = None + + +class FlowRead(FlowBase): + id: UUID + user_id: Optional[UUID] = Field() + + +class FlowUpdate(SQLModel): + name: Optional[str] = None + description: Optional[str] = None + data: Optional[Dict] = None diff --git a/src/backend/langflow/services/database/models/user/__init__.py 
b/src/backend/base/langflow/services/database/models/user/__init__.py similarity index 100% rename from src/backend/langflow/services/database/models/user/__init__.py rename to src/backend/base/langflow/services/database/models/user/__init__.py diff --git a/src/backend/langflow/services/database/models/user/crud.py b/src/backend/base/langflow/services/database/models/user/crud.py similarity index 99% rename from src/backend/langflow/services/database/models/user/crud.py rename to src/backend/base/langflow/services/database/models/user/crud.py index e8f58735c..5a948815e 100644 --- a/src/backend/langflow/services/database/models/user/crud.py +++ b/src/backend/base/langflow/services/database/models/user/crud.py @@ -3,12 +3,13 @@ from typing import Optional, Union from uuid import UUID from fastapi import Depends, HTTPException, status -from langflow.services.database.models.user.model import User, UserUpdate -from langflow.services.deps import get_session from sqlalchemy.exc import IntegrityError from sqlalchemy.orm.attributes import flag_modified from sqlmodel import Session, select +from langflow.services.database.models.user.model import User, UserUpdate +from langflow.services.deps import get_session + def get_user_by_username(db: Session, username: str) -> Union[User, None]: return db.exec(select(User).where(User.username == username)).first() diff --git a/src/backend/langflow/services/database/models/user/model.py b/src/backend/base/langflow/services/database/models/user/model.py similarity index 93% rename from src/backend/langflow/services/database/models/user/model.py rename to src/backend/base/langflow/services/database/models/user/model.py index dccd3c305..fa229b4b3 100644 --- a/src/backend/langflow/services/database/models/user/model.py +++ b/src/backend/base/langflow/services/database/models/user/model.py @@ -6,7 +6,7 @@ from sqlmodel import Field, Relationship, SQLModel if TYPE_CHECKING: from langflow.services.database.models.api_key import ApiKey - from 
langflow.services.database.models.credential import Credential + from langflow.services.database.models.variable import Variable from langflow.services.database.models.flow import Flow @@ -26,7 +26,7 @@ class User(SQLModel, table=True): ) store_api_key: Optional[str] = Field(default=None, nullable=True) flows: list["Flow"] = Relationship(back_populates="user") - credentials: list["Credential"] = Relationship( + variables: list["Variable"] = Relationship( back_populates="user", sa_relationship_kwargs={"cascade": "delete"}, ) diff --git a/src/backend/base/langflow/services/database/models/variable/__init__.py b/src/backend/base/langflow/services/database/models/variable/__init__.py new file mode 100644 index 000000000..62ecc4f87 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/variable/__init__.py @@ -0,0 +1,3 @@ +from .model import Variable, VariableCreate, VariableRead, VariableUpdate + +__all__ = ["Variable", "VariableCreate", "VariableRead", "VariableUpdate"] diff --git a/src/backend/base/langflow/services/database/models/variable/model.py b/src/backend/base/langflow/services/database/models/variable/model.py new file mode 100644 index 000000000..1c314d756 --- /dev/null +++ b/src/backend/base/langflow/services/database/models/variable/model.py @@ -0,0 +1,49 @@ +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Optional +from uuid import UUID, uuid4 + +from sqlmodel import Field, Relationship, SQLModel + + +if TYPE_CHECKING: + from langflow.services.database.models.user.model import User + + +def utc_now(): + return datetime.now(timezone.utc) + + +class VariableBase(SQLModel): + name: Optional[str] = Field(None, description="Name of the variable") + value: Optional[str] = Field(None, description="Encrypted value of the variable") + type: Optional[str] = Field(None, description="Type of the variable") + + +class Variable(VariableBase, table=True): + id: Optional[UUID] = Field( + default_factory=uuid4, + 
primary_key=True, + description="Unique ID for the variable", + ) + # name is unique per user + created_at: datetime = Field(default_factory=utc_now, description="Creation time of the variable") + updated_at: Optional[datetime] = Field(None, description="Last update time of the variable") + # foreign key to user table + user_id: UUID = Field(description="User ID associated with this variable", foreign_key="user.id") + user: "User" = Relationship(back_populates="variables") + + +class VariableCreate(VariableBase): + type: Optional[str] = Field(None, description="Type of the variable") + + +class VariableRead(SQLModel): + id: UUID + name: Optional[str] = Field(None, description="Name of the variable") + type: Optional[str] = Field(None, description="Type of the variable") + + +class VariableUpdate(SQLModel): + id: UUID # Include the ID for updating + name: Optional[str] = Field(None, description="Name of the variable") + value: Optional[str] = Field(None, description="Encrypted value of the variable") diff --git a/src/backend/langflow/services/database/service.py b/src/backend/base/langflow/services/database/service.py similarity index 94% rename from src/backend/langflow/services/database/service.py rename to src/backend/base/langflow/services/database/service.py index 1103d7755..f3d77f6d7 100644 --- a/src/backend/langflow/services/database/service.py +++ b/src/backend/base/langflow/services/database/service.py @@ -1,3 +1,4 @@ +from datetime import datetime import time from pathlib import Path from typing import TYPE_CHECKING @@ -40,7 +41,7 @@ class DatabaseService(Service): connect_args = {"check_same_thread": False} else: connect_args = {} - return create_engine(self.database_url, connect_args=connect_args, max_overflow=-1) + return create_engine(self.database_url, connect_args=connect_args) def __enter__(self): self._session = Session(self.engine) @@ -124,10 +125,16 @@ class DatabaseService(Service): # if not self.script_location.exists(): # this is not the 
correct way to check if alembic has been initialized # We need to check if the alembic_version table exists # if not, we need to initialize alembic - alembic_cfg = Config() + # stdout should be something like sys.stdout + # which is a buffer + # I don't want to output anything + # subprocess.DEVNULL is an int + buffer = open(self.script_location / "alembic.log", "w") + alembic_cfg = Config(stdout=buffer) # alembic_cfg.attributes["connection"] = session alembic_cfg.set_main_option("script_location", str(self.script_location)) alembic_cfg.set_main_option("sqlalchemy.url", self.database_url) + should_initialize_alembic = False with Session(self.engine) as session: # If the table does not exist it throws an error @@ -150,6 +157,7 @@ class DatabaseService(Service): logger.info(f"Running DB migrations in {self.script_location}") try: + buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n") command.check(alembic_cfg) except Exception as exc: if isinstance(exc, (util.exc.CommandError, util.exc.AutogenerateDiffsDetected)): @@ -157,13 +165,14 @@ class DatabaseService(Service): time.sleep(3) try: + buffer.write(f"{datetime.now().isoformat()}: Checking migrations\n") command.check(alembic_cfg) - except util.exc.AutogenerateDiffsDetected as e: - logger.error("AutogenerateDiffsDetected: {exc}") + except util.exc.AutogenerateDiffsDetected as exc: + logger.error(f"AutogenerateDiffsDetected: {exc}") if not fix: raise RuntimeError( "Something went wrong running migrations. 
Please, run `langflow migration --fix`" - ) from e + ) from exc if fix: self.try_downgrade_upgrade_until_success(alembic_cfg) diff --git a/src/backend/langflow/services/database/utils.py b/src/backend/base/langflow/services/database/utils.py similarity index 100% rename from src/backend/langflow/services/database/utils.py rename to src/backend/base/langflow/services/database/utils.py diff --git a/src/backend/base/langflow/services/deps.py b/src/backend/base/langflow/services/deps.py new file mode 100644 index 000000000..23a78e320 --- /dev/null +++ b/src/backend/base/langflow/services/deps.py @@ -0,0 +1,219 @@ +from contextlib import contextmanager +from typing import TYPE_CHECKING, Generator + +from langflow.services import ServiceType, service_manager + +if TYPE_CHECKING: + from sqlmodel import Session + + from langflow.services.cache.service import CacheService + from langflow.services.chat.service import ChatService + from langflow.services.database.service import DatabaseService + from langflow.services.monitor.service import MonitorService + from langflow.services.plugins.service import PluginService + from langflow.services.session.service import SessionService + from langflow.services.settings.service import SettingsService + from langflow.services.socket.service import SocketIOService + from langflow.services.state.service import StateService + from langflow.services.storage.service import StorageService + from langflow.services.store.service import StoreService + from langflow.services.task.service import TaskService + from langflow.services.variable.service import VariableService + + +def get_service(service_type: ServiceType): + """ + Retrieves the service instance for the given service type. + + Args: + service_type (ServiceType): The type of service to retrieve. + + Returns: + Any: The service instance. 
+ + """ + return service_manager.get(service_type) # type: ignore + + +def get_state_service() -> "StateService": + """ + Retrieves the StateService instance from the service manager. + + Returns: + The StateService instance. + """ + return service_manager.get(ServiceType.STATE_SERVICE) # type: ignore + + +def get_socket_service() -> "SocketIOService": + """ + Get the SocketIOService instance from the service manager. + + Returns: + SocketIOService: The SocketIOService instance. + """ + return service_manager.get(ServiceType.SOCKETIO_SERVICE) # type: ignore + + +def get_storage_service() -> "StorageService": + """ + Retrieves the storage service instance. + + Returns: + The storage service instance. + """ + return service_manager.get(ServiceType.STORAGE_SERVICE) # type: ignore + + +def get_variable_service() -> "VariableService": + """ + Retrieves the VariableService instance from the service manager. + + Returns: + The VariableService instance. + + """ + return service_manager.get(ServiceType.VARIABLE_SERVICE) # type: ignore + + +def get_plugins_service() -> "PluginService": + """ + Get the PluginService instance from the service manager. + + Returns: + PluginService: The PluginService instance. + """ + return service_manager.get(ServiceType.PLUGIN_SERVICE) # type: ignore + + +def get_settings_service() -> "SettingsService": + """ + Retrieves the SettingsService instance. + + If the service is not yet initialized, it will be initialized before returning. + + Returns: + The SettingsService instance. + + Raises: + ValueError: If the service cannot be retrieved or initialized. 
+ """ + try: + return service_manager.get(ServiceType.SETTINGS_SERVICE) # type: ignore + except ValueError: + # initialize settings service + from langflow.services.manager import initialize_settings_service + + initialize_settings_service() + return service_manager.get(ServiceType.SETTINGS_SERVICE) # type: ignore + + +def get_db_service() -> "DatabaseService": + """ + Retrieves the DatabaseService instance from the service manager. + + Returns: + The DatabaseService instance. + + """ + return service_manager.get(ServiceType.DATABASE_SERVICE) # type: ignore + + +def get_session() -> Generator["Session", None, None]: + """ + Retrieves a session from the database service. + + Yields: + Session: A session object. + + """ + db_service = get_db_service() + yield from db_service.get_session() + + +@contextmanager +def session_scope(): + """ + Context manager for managing a session scope. + + This context manager is used to manage a session scope for database operations. + It ensures that the session is properly committed if no exceptions occur, + and rolled back if an exception is raised. + + Yields: + session: The session object. + + Raises: + Exception: If an error occurs during the session scope. + + """ + session = next(get_session()) + try: + yield session + session.commit() + except: + session.rollback() + raise + finally: + session.close() + + +def get_cache_service() -> "CacheService": + """ + Retrieves the cache service from the service manager. + + Returns: + The cache service instance. + """ + return service_manager.get(ServiceType.CACHE_SERVICE) # type: ignore + + +def get_session_service() -> "SessionService": + """ + Retrieves the session service from the service manager. + + Returns: + The session service instance. + """ + return service_manager.get(ServiceType.SESSION_SERVICE) # type: ignore + + +def get_monitor_service() -> "MonitorService": + """ + Retrieves the MonitorService instance from the service manager. 
+ + Returns: + MonitorService: The MonitorService instance. + """ + return service_manager.get(ServiceType.MONITOR_SERVICE) # type: ignore + + +def get_task_service() -> "TaskService": + """ + Retrieves the TaskService instance from the service manager. + + Returns: + The TaskService instance. + + """ + return service_manager.get(ServiceType.TASK_SERVICE) # type: ignore + + +def get_chat_service() -> "ChatService": + """ + Get the chat service instance. + + Returns: + ChatService: The chat service instance. + """ + return service_manager.get(ServiceType.CHAT_SERVICE) # type: ignore + + +def get_store_service() -> "StoreService": + """ + Retrieves the StoreService instance from the service manager. + + Returns: + StoreService: The StoreService instance. + """ + return service_manager.get(ServiceType.STORE_SERVICE) # type: ignore diff --git a/src/backend/base/langflow/services/factory.py b/src/backend/base/langflow/services/factory.py new file mode 100644 index 000000000..49bea7db2 --- /dev/null +++ b/src/backend/base/langflow/services/factory.py @@ -0,0 +1,83 @@ +import importlib +import inspect +from typing import TYPE_CHECKING, Type, get_type_hints + +from cachetools import LRUCache, cached +from loguru import logger + +from langflow.services.schema import ServiceType + +if TYPE_CHECKING: + from langflow.services.base import Service + + +class ServiceFactory: + def __init__( + self, + service_class, + ): + self.service_class = service_class + self.dependencies = infer_service_types(self, import_all_services_into_a_dict()) + + def create(self, *args, **kwargs) -> "Service": + raise self.service_class(*args, **kwargs) + + +def hash_factory(factory: Type[ServiceFactory]) -> str: + return factory.service_class.__name__ + + +def hash_dict(d: dict) -> str: + return str(d) + + +def hash_infer_service_types_args(factory_class: Type[ServiceFactory], available_services=None) -> str: + factory_hash = hash_factory(factory_class) + services_hash = hash_dict(available_services) 
+ return f"{factory_hash}_{services_hash}" + + +@cached(cache=LRUCache(maxsize=10), key=hash_infer_service_types_args) +def infer_service_types(factory_class: Type[ServiceFactory], available_services=None) -> list["ServiceType"]: + create_method = factory_class.create + type_hints = get_type_hints(create_method, globalns=available_services) + service_types = [] + for param_name, param_type in type_hints.items(): + # Skip the return type if it's included in type hints + if param_name == "return": + continue + + # Convert the type to the expected enum format directly without appending "_SERVICE" + type_name = param_type.__name__.upper().replace("SERVICE", "_SERVICE") + + try: + # Attempt to find a matching enum value + service_type = ServiceType[type_name] + service_types.append(service_type) + except KeyError: + raise ValueError(f"No matching ServiceType for parameter type: {param_type.__name__}") + return service_types + + +@cached(cache=LRUCache(maxsize=1)) +def import_all_services_into_a_dict(): + # Services are all in langflow.services.{service_name}.service + # and are subclass of Service + # We want to import all of them and put them in a dict + # to use as globals + from langflow.services.base import Service + + services = {} + for service_type in ServiceType: + try: + service_name = ServiceType(service_type).value.replace("_service", "") + module_name = f"langflow.services.{service_name}.service" + module = importlib.import_module(module_name) + for name, obj in inspect.getmembers(module, inspect.isclass): + if issubclass(obj, Service) and obj is not Service: + services[name] = obj + break + except Exception as exc: + logger.exception(exc) + raise RuntimeError("Could not initialize services. 
Please check your settings.") from exc + return services diff --git a/src/backend/langflow/services/manager.py b/src/backend/base/langflow/services/manager.py similarity index 61% rename from src/backend/langflow/services/manager.py rename to src/backend/base/langflow/services/manager.py index 0adeefd29..20186f7da 100644 --- a/src/backend/langflow/services/manager.py +++ b/src/backend/base/langflow/services/manager.py @@ -1,11 +1,11 @@ -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING, Dict -from langflow.services.schema import ServiceType from loguru import logger if TYPE_CHECKING: from langflow.services.base import Service from langflow.services.factory import ServiceFactory + from langflow.services.schema import ServiceType class ServiceManager: @@ -16,23 +16,19 @@ class ServiceManager: def __init__(self): self.services: Dict[str, "Service"] = {} self.factories = {} - self.dependencies = {} def register_factory( self, service_factory: "ServiceFactory", - dependencies: Optional[List[ServiceType]] = None, ): """ Registers a new factory with dependencies. """ - if dependencies is None: - dependencies = [] + service_name = service_factory.service_class.name self.factories[service_name] = service_factory - self.dependencies[service_name] = dependencies - def get(self, service_name: ServiceType) -> "Service": + def get(self, service_name: "ServiceType") -> "Service": """ Get (or create) a service by its name. """ @@ -41,7 +37,7 @@ class ServiceManager: return self.services[service_name] - def _create_service(self, service_name: ServiceType): + def _create_service(self, service_name: "ServiceType"): """ Create a new service given its name, handling dependencies. 
""" @@ -49,25 +45,26 @@ class ServiceManager: self._validate_service_creation(service_name) # Create dependencies first - for dependency in self.dependencies.get(service_name, []): + factory = self.factories.get(service_name) + for dependency in factory.dependencies: if dependency not in self.services: self._create_service(dependency) # Collect the dependent services - dependent_services = {dep.value: self.services[dep] for dep in self.dependencies.get(service_name, [])} + dependent_services = {dep.value: self.services[dep] for dep in factory.dependencies} # Create the actual service self.services[service_name] = self.factories[service_name].create(**dependent_services) self.services[service_name].set_ready() - def _validate_service_creation(self, service_name: ServiceType): + def _validate_service_creation(self, service_name: "ServiceType"): """ Validate whether the service can be created. """ if service_name not in self.factories: raise ValueError(f"No factory registered for the service class '{service_name.name}'") - def update(self, service_name: ServiceType): + def update(self, service_name: "ServiceType"): """ Update a service by its name. """ @@ -90,36 +87,11 @@ class ServiceManager: logger.exception(exc) self.services = {} self.factories = {} - self.dependencies = {} service_manager = ServiceManager() -def reinitialize_services(): - """ - Reinitialize all the services needed. 
- """ - - service_manager.update(ServiceType.SETTINGS_SERVICE) - service_manager.update(ServiceType.DATABASE_SERVICE) - service_manager.update(ServiceType.CACHE_SERVICE) - service_manager.update(ServiceType.CHAT_SERVICE) - service_manager.update(ServiceType.SESSION_SERVICE) - service_manager.update(ServiceType.AUTH_SERVICE) - service_manager.update(ServiceType.TASK_SERVICE) - - # Test cache connection - service_manager.get(ServiceType.CACHE_SERVICE) - # Test database connection - service_manager.get(ServiceType.DATABASE_SERVICE) - - # Test cache connection - service_manager.get(ServiceType.CACHE_SERVICE) - # Test database connection - service_manager.get(ServiceType.DATABASE_SERVICE) - - def initialize_settings_service(): """ Initialize the settings manager. @@ -134,13 +106,12 @@ def initialize_session_service(): Initialize the session manager. """ from langflow.services.cache import factory as cache_factory - from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.session import ( + factory as session_service_factory, + ) # type: ignore initialize_settings_service() - service_manager.register_factory(cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE]) + service_manager.register_factory(cache_factory.CacheServiceFactory()) - service_manager.register_factory( - session_service_factory.SessionServiceFactory(), - dependencies=[ServiceType.CACHE_SERVICE], - ) + service_manager.register_factory(session_service_factory.SessionServiceFactory()) diff --git a/src/backend/base/langflow/services/monitor/__init__.py b/src/backend/base/langflow/services/monitor/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/services/monitor/factory.py b/src/backend/base/langflow/services/monitor/factory.py new file mode 100644 index 000000000..054e6bb3f --- /dev/null +++ b/src/backend/base/langflow/services/monitor/factory.py @@ -0,0 +1,13 @@ +from 
langflow.services.factory import ServiceFactory +from langflow.services.monitor.service import MonitorService +from langflow.services.settings.service import SettingsService + + +class MonitorServiceFactory(ServiceFactory): + name = "monitor_service" + + def __init__(self): + super().__init__(MonitorService) + + def create(self, settings_service: SettingsService): + return self.service_class(settings_service) diff --git a/src/backend/base/langflow/services/monitor/schema.py b/src/backend/base/langflow/services/monitor/schema.py new file mode 100644 index 000000000..80636a554 --- /dev/null +++ b/src/backend/base/langflow/services/monitor/schema.py @@ -0,0 +1,142 @@ +import json +from datetime import datetime +from typing import TYPE_CHECKING, Any, Optional + +from pydantic import BaseModel, Field, field_serializer, validator + +if TYPE_CHECKING: + from langflow.schema import Record + + +class TransactionModel(BaseModel): + id: Optional[int] = Field(default=None, alias="id") + timestamp: Optional[datetime] = Field(default_factory=datetime.now, alias="timestamp") + source: str + target: str + target_args: dict + status: str + error: Optional[str] = None + + class Config: + from_attributes = True + populate_by_name = True + + # validate target_args in case it is a JSON + @validator("target_args", pre=True) + def validate_target_args(cls, v): + if isinstance(v, str): + return json.loads(v) + return v + + +class MessageModel(BaseModel): + id: Optional[int] = Field(default=None, alias="id") + timestamp: datetime = Field(default_factory=datetime.now) + sender: str + sender_name: str + session_id: str + message: str + artifacts: dict + + class Config: + from_attributes = True + populate_by_name = True + + @validator("artifacts", pre=True) + def validate_target_args(cls, v): + if isinstance(v, str): + return json.loads(v) + return v + + @classmethod + def from_record(cls, record: "Record"): + # first check if the record has all the required fields + if not record.data or 
("sender" not in record.data and "sender_name" not in record.data): + raise ValueError("The record does not have the required fields 'sender' and 'sender_name' in the data.") + return cls( + sender=record.sender, + sender_name=record.sender_name, + message=record.text, + session_id=record.session_id, + artifacts=record.artifacts or {}, + ) + + +class VertexBuildModel(BaseModel): + index: Optional[int] = Field(default=None, alias="index", exclude=True) + id: Optional[str] = Field(default=None, alias="id") + flow_id: str + valid: bool + params: Any + data: dict + artifacts: dict + timestamp: datetime = Field(default_factory=datetime.now) + + class Config: + from_attributes = True + populate_by_name = True + + @field_serializer("data", "artifacts") + def serialize_dict(v): + if isinstance(v, dict): + # check if the value of each key is a BaseModel or a list of BaseModels + for key, value in v.items(): + if isinstance(value, BaseModel): + v[key] = value.model_dump() + elif isinstance(value, list) and all(isinstance(i, BaseModel) for i in value): + v[key] = [i.model_dump() for i in value] + return json.dumps(v) + return v + + @validator("params", pre=True) + def validate_params(cls, v): + if isinstance(v, str): + try: + return json.loads(v) + except json.JSONDecodeError: + return v + return v + + @field_serializer("params") + def serialize_params(v): + if isinstance(v, list) and all(isinstance(i, BaseModel) for i in v): + return json.dumps([i.model_dump() for i in v]) + return v + + @validator("data", pre=True) + def validate_data(cls, v): + if isinstance(v, str): + return json.loads(v) + return v + + @validator("artifacts", pre=True) + def validate_artifacts(cls, v): + if isinstance(v, str): + return json.loads(v) + return v + + +class VertexBuildResponseModel(VertexBuildModel): + @field_serializer("data", "artifacts") + def serialize_dict(v): + return v + + +def to_map(value: dict): + keys = list(value.keys()) + values = list(value.values()) + return {"key": keys, 
"value": values} + + +class VertexBuildMapModel(BaseModel): + vertex_builds: dict[str, list[VertexBuildResponseModel]] + + @classmethod + def from_list_of_dicts(cls, vertex_build_dicts): + vertex_build_map = {} + for vertex_build_dict in vertex_build_dicts: + vertex_build = VertexBuildResponseModel(**vertex_build_dict) + if vertex_build.id not in vertex_build_map: + vertex_build_map[vertex_build.id] = [] + vertex_build_map[vertex_build.id].append(vertex_build) + return cls(vertex_builds=vertex_build_map) diff --git a/src/backend/base/langflow/services/monitor/service.py b/src/backend/base/langflow/services/monitor/service.py new file mode 100644 index 000000000..e3d56db52 --- /dev/null +++ b/src/backend/base/langflow/services/monitor/service.py @@ -0,0 +1,167 @@ +from datetime import datetime +from pathlib import Path +from typing import TYPE_CHECKING, Optional, Union + +import duckdb +from loguru import logger +from platformdirs import user_cache_dir + +from langflow.services.base import Service +from langflow.services.monitor.schema import MessageModel, TransactionModel, VertexBuildModel +from langflow.services.monitor.utils import add_row_to_table, drop_and_create_table_if_schema_mismatch + +if TYPE_CHECKING: + from langflow.services.settings.manager import SettingsService + + +class MonitorService(Service): + name = "monitor_service" + + def __init__(self, settings_service: "SettingsService"): + self.settings_service = settings_service + self.base_cache_dir = Path(user_cache_dir("langflow")) + self.db_path = self.base_cache_dir / "monitor.duckdb" + self.table_map = { + "transactions": TransactionModel, + "messages": MessageModel, + "vertex_builds": VertexBuildModel, + } + + try: + self.ensure_tables_exist() + except Exception as e: + logger.exception(f"Error initializing monitor service: {e}") + + def to_df(self, table_name): + return self.load_table_as_dataframe(table_name) + + def ensure_tables_exist(self): + for table_name, model in self.table_map.items(): + 
drop_and_create_table_if_schema_mismatch(str(self.db_path), table_name, model) + + def add_row( + self, + table_name: str, + data: Union[dict, TransactionModel, MessageModel, VertexBuildModel], + ): + # Make sure the model passed matches the table + + model = self.table_map.get(table_name) + if model is None: + raise ValueError(f"Unknown table name: {table_name}") + + # Connect to DuckDB and add the row + with duckdb.connect(str(self.db_path)) as conn: + add_row_to_table(conn, table_name, model, data) + + def load_table_as_dataframe(self, table_name): + with duckdb.connect(str(self.db_path)) as conn: + return conn.table(table_name).df() + + @staticmethod + def get_timestamp(): + return datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + def get_vertex_builds( + self, + flow_id: Optional[str] = None, + vertex_id: Optional[str] = None, + valid: Optional[bool] = None, + order_by: Optional[str] = "timestamp", + ): + query = "SELECT id, flow_id, valid, params, data, artifacts, timestamp FROM vertex_builds" + conditions = [] + if flow_id: + conditions.append(f"flow_id = '{flow_id}'") + if vertex_id: + conditions.append(f"id = '{vertex_id}'") + if valid is not None: # Check for None because valid is a boolean + valid_str = "true" if valid else "false" + conditions.append(f"valid = {valid_str}") + + if conditions: + query += " WHERE " + " AND ".join(conditions) + + if order_by: + query += f" ORDER BY {order_by}" + + with duckdb.connect(str(self.db_path)) as conn: + df = conn.execute(query).df() + + return df.to_dict(orient="records") + + def delete_vertex_builds(self, flow_id: Optional[str] = None): + query = "DELETE FROM vertex_builds" + if flow_id: + query += f" WHERE flow_id = '{flow_id}'" + + with duckdb.connect(str(self.db_path)) as conn: + conn.execute(query) + + def delete_messages(self, session_id: str): + query = f"DELETE FROM messages WHERE session_id = '{session_id}'" + + with duckdb.connect(str(self.db_path)) as conn: + conn.execute(query) + + def 
add_message(self, message: MessageModel): + self.add_row("messages", message) + + def get_messages( + self, + sender: Optional[str] = None, + sender_name: Optional[str] = None, + session_id: Optional[str] = None, + order_by: Optional[str] = "timestamp", + order: Optional[str] = "DESC", + limit: Optional[int] = None, + ): + query = "SELECT sender_name, sender, session_id, message, artifacts, timestamp FROM messages" + conditions = [] + if sender: + conditions.append(f"sender = '{sender}'") + if sender_name: + conditions.append(f"sender_name = '{sender_name}'") + if session_id: + conditions.append(f"session_id = '{session_id}'") + + if conditions: + query += " WHERE " + " AND ".join(conditions) + + if order_by and order: + # Make sure the order is from newest to oldest + query += f" ORDER BY {order_by} {order.upper()}" + + if limit is not None: + query += f" LIMIT {limit}" + + with duckdb.connect(str(self.db_path)) as conn: + df = conn.execute(query).df() + + return df + + def get_transactions( + self, + source: Optional[str] = None, + target: Optional[str] = None, + status: Optional[str] = None, + order_by: Optional[str] = "timestamp", + ): + query = "SELECT source, target, target_args, status, error, timestamp FROM transactions" + conditions = [] + if source: + conditions.append(f"source = '{source}'") + if target: + conditions.append(f"target = '{target}'") + if status: + conditions.append(f"status = '{status}'") + + if conditions: + query += " WHERE " + " AND ".join(conditions) + + if order_by: + query += f" ORDER BY {order_by}" + with duckdb.connect(str(self.db_path)) as conn: + df = conn.execute(query).df() + + return df.to_dict(orient="records") diff --git a/src/backend/base/langflow/services/monitor/utils.py b/src/backend/base/langflow/services/monitor/utils.py new file mode 100644 index 000000000..a71653e82 --- /dev/null +++ b/src/backend/base/langflow/services/monitor/utils.py @@ -0,0 +1,159 @@ +from typing import TYPE_CHECKING, Any, Dict, Optional, Type, 
Union + +import duckdb +from loguru import logger +from pydantic import BaseModel + +from langflow.services.deps import get_monitor_service + +if TYPE_CHECKING: + from langflow.api.v1.schemas import ResultDataResponse + + +INDEX_KEY = "index" + + +def get_table_schema_as_dict(conn: duckdb.DuckDBPyConnection, table_name: str) -> dict: + result = conn.execute(f"PRAGMA table_info('{table_name}')").fetchall() + schema = {row[1]: row[2].upper() for row in result} + schema.pop(INDEX_KEY, None) + return schema + + +def model_to_sql_column_definitions(model: Type[BaseModel]) -> dict: + columns = {} + for field_name, field_type in model.model_fields.items(): + if hasattr(field_type.annotation, "__args__") and field_type.annotation is not None: + field_args = field_type.annotation.__args__ + else: + field_args = [] + field_info = field_args[0] if field_args else field_type.annotation + if field_info.__name__ == "int": + sql_type = "INTEGER" + elif field_info.__name__ == "str": + sql_type = "VARCHAR" + elif field_info.__name__ == "datetime": + sql_type = "TIMESTAMP" + elif field_info.__name__ == "bool": + sql_type = "BOOLEAN" + elif field_info.__name__ == "dict": + sql_type = "VARCHAR" + elif field_info.__name__ == "Any": + sql_type = "VARCHAR" + else: + continue # Skip types we don't handle + columns[field_name] = sql_type + return columns + + +def drop_and_create_table_if_schema_mismatch(db_path: str, table_name: str, model: Type[BaseModel]): + with duckdb.connect(db_path) as conn: + # Get the current schema from the database + try: + current_schema = get_table_schema_as_dict(conn, table_name) + except duckdb.CatalogException: + current_schema = {} + # Get the desired schema from the model + desired_schema = model_to_sql_column_definitions(model) + + # Compare the current and desired schemas + + if current_schema != desired_schema: + # If they don't match, drop the existing table and create a new one + conn.execute(f"DROP TABLE IF EXISTS {table_name}") + if "id" in 
desired_schema.keys(): + # Create a sequence for the id column + try: + conn.execute(f"CREATE SEQUENCE seq_{table_name} START 1;") + except duckdb.CatalogException: + pass + desired_schema[INDEX_KEY] = f"INTEGER PRIMARY KEY DEFAULT NEXTVAL('seq_{table_name}')" + columns_sql = ", ".join(f"{name} {data_type}" for name, data_type in desired_schema.items()) + create_table_sql = f"CREATE TABLE {table_name} ({columns_sql})" + conn.execute(create_table_sql) + + +def add_row_to_table( + conn: duckdb.DuckDBPyConnection, + table_name: str, + model: Type, + monitor_data: Union[Dict[str, Any], BaseModel], +): + # Validate the data with the Pydantic model + if isinstance(monitor_data, model): + validated_data = monitor_data + else: + validated_data = model(**monitor_data) + + # Extract data for the insert statement + validated_dict = validated_data.model_dump() + keys = [key for key in validated_dict.keys() if key != INDEX_KEY] + columns = ", ".join(keys) + + values_placeholders = ", ".join(["?" for _ in keys]) + values = list(validated_dict.values()) + + # Create the insert statement + insert_sql = f"INSERT INTO {table_name} ({columns}) VALUES ({values_placeholders})" + + # Execute the insert statement + try: + conn.execute(insert_sql, values) + except Exception as e: + # Log values types + for key, value in validated_dict.items(): + logger.error(f"{key}: {type(value)}") + + logger.error(f"Error adding row to table: {e}") + + +async def log_message( + sender: str, + sender_name: str, + message: str, + session_id: str, + artifacts: Optional[dict] = None, +): + try: + from langflow.graph.vertex.base import Vertex + + if isinstance(session_id, Vertex): + session_id = await session_id.build() # type: ignore + + monitor_service = get_monitor_service() + row = { + "sender": sender, + "sender_name": sender_name, + "message": message, + "artifacts": artifacts or {}, + "session_id": session_id, + "timestamp": monitor_service.get_timestamp(), + } + 
monitor_service.add_row(table_name="messages", data=row) + except Exception as e: + logger.error(f"Error logging message: {e}") + + +async def log_vertex_build( + flow_id: str, + vertex_id: str, + valid: bool, + params: Any, + data: "ResultDataResponse", + artifacts: Optional[dict] = None, +): + try: + monitor_service = get_monitor_service() + + row = { + "flow_id": flow_id, + "id": vertex_id, + "valid": valid, + "params": params, + "data": data.model_dump(), + "artifacts": artifacts or {}, + "timestamp": monitor_service.get_timestamp(), + } + monitor_service.add_row(table_name="vertex_builds", data=row) + except Exception as e: + logger.exception(f"Error logging vertex build: {e}") diff --git a/src/backend/base/langflow/services/plugins/__init__.py b/src/backend/base/langflow/services/plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/plugins/base.py b/src/backend/base/langflow/services/plugins/base.py similarity index 100% rename from src/backend/langflow/services/plugins/base.py rename to src/backend/base/langflow/services/plugins/base.py diff --git a/src/backend/langflow/services/plugins/factory.py b/src/backend/base/langflow/services/plugins/factory.py similarity index 100% rename from src/backend/langflow/services/plugins/factory.py rename to src/backend/base/langflow/services/plugins/factory.py diff --git a/src/backend/langflow/services/plugins/langfuse_plugin.py b/src/backend/base/langflow/services/plugins/langfuse_plugin.py similarity index 99% rename from src/backend/langflow/services/plugins/langfuse_plugin.py rename to src/backend/base/langflow/services/plugins/langfuse_plugin.py index e5d3afdd6..e6d37d3c5 100644 --- a/src/backend/langflow/services/plugins/langfuse_plugin.py +++ b/src/backend/base/langflow/services/plugins/langfuse_plugin.py @@ -1,8 +1,9 @@ from typing import Optional +from loguru import logger + from langflow.services.deps import get_settings_service from 
langflow.services.plugins.base import CallbackPlugin -from loguru import logger class LangfuseInstance: diff --git a/src/backend/langflow/services/plugins/service.py b/src/backend/base/langflow/services/plugins/service.py similarity index 99% rename from src/backend/langflow/services/plugins/service.py rename to src/backend/base/langflow/services/plugins/service.py index f7b2895ec..98f210fea 100644 --- a/src/backend/langflow/services/plugins/service.py +++ b/src/backend/base/langflow/services/plugins/service.py @@ -3,9 +3,10 @@ import inspect import os from typing import TYPE_CHECKING, Union +from loguru import logger + from langflow.services.base import Service from langflow.services.plugins.base import BasePlugin, CallbackPlugin -from loguru import logger if TYPE_CHECKING: from langflow.services.settings.service import SettingsService diff --git a/src/backend/langflow/services/schema.py b/src/backend/base/langflow/services/schema.py similarity index 66% rename from src/backend/langflow/services/schema.py rename to src/backend/base/langflow/services/schema.py index 8265c1108..fe0d7022e 100644 --- a/src/backend/langflow/services/schema.py +++ b/src/backend/base/langflow/services/schema.py @@ -14,6 +14,10 @@ class ServiceType(str, Enum): CHAT_SERVICE = "chat_service" SESSION_SERVICE = "session_service" TASK_SERVICE = "task_service" - PLUGIN_SERVICE = "plugin_service" + PLUGINS_SERVICE = "plugins_service" STORE_SERVICE = "store_service" - CREDENTIAL_SERVICE = "credential_service" + VARIABLE_SERVICE = "variable_service" + STORAGE_SERVICE = "storage_service" + MONITOR_SERVICE = "monitor_service" + SOCKETIO_SERVICE = "socket_service" + STATE_SERVICE = "state_service" diff --git a/src/backend/base/langflow/services/session/__init__.py b/src/backend/base/langflow/services/session/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/session/factory.py b/src/backend/base/langflow/services/session/factory.py similarity index 
72% rename from src/backend/langflow/services/session/factory.py rename to src/backend/base/langflow/services/session/factory.py index beb0bd6bd..d55bd5b46 100644 --- a/src/backend/langflow/services/session/factory.py +++ b/src/backend/base/langflow/services/session/factory.py @@ -1,14 +1,15 @@ from typing import TYPE_CHECKING -from langflow.services.session.service import SessionService + from langflow.services.factory import ServiceFactory +from langflow.services.session.service import SessionService if TYPE_CHECKING: - from langflow.services.cache.service import BaseCacheService + from langflow.services.cache.service import CacheService class SessionServiceFactory(ServiceFactory): def __init__(self): super().__init__(SessionService) - def create(self, cache_service: "BaseCacheService"): + def create(self, cache_service: "CacheService"): return SessionService(cache_service) diff --git a/src/backend/langflow/services/session/service.py b/src/backend/base/langflow/services/session/service.py similarity index 57% rename from src/backend/langflow/services/session/service.py rename to src/backend/base/langflow/services/session/service.py index 059d82bec..0c925db19 100644 --- a/src/backend/langflow/services/session/service.py +++ b/src/backend/base/langflow/services/session/service.py @@ -1,32 +1,33 @@ -from typing import TYPE_CHECKING, Optional +from typing import Coroutine, Optional from langflow.interface.run import build_sorted_vertices from langflow.services.base import Service +from langflow.services.cache.base import CacheService from langflow.services.session.utils import compute_dict_hash, session_id_generator -if TYPE_CHECKING: - from langflow.services.cache.base import BaseCacheService - class SessionService(Service): name = "session_service" def __init__(self, cache_service): - self.cache_service: "BaseCacheService" = cache_service + self.cache_service: "CacheService" = cache_service - async def load_session(self, key, data_graph: Optional[dict] = None): + 
async def load_session(self, key, flow_id: str, data_graph: Optional[dict] = None): # Check if the data is cached if key in self.cache_service: - return self.cache_service.get(key) + result = self.cache_service.get(key) + if isinstance(result, Coroutine): + result = await result + return result if key is None: key = self.generate_key(session_id=None, data_graph=data_graph) if data_graph is None: return (None, None) # If not cached, build the graph and cache it - graph, artifacts = await build_sorted_vertices(data_graph) + graph, artifacts = await build_sorted_vertices(data_graph, flow_id) - self.cache_service.set(key, (graph, artifacts)) + await self.cache_service.set(key, (graph, artifacts)) return graph, artifacts @@ -41,8 +42,14 @@ class SessionService(Service): session_id = session_id_generator() return self.build_key(session_id, data_graph=data_graph) - def update_session(self, session_id, value): - self.cache_service.set(session_id, value) + async def update_session(self, session_id, value): + result = self.cache_service.set(session_id, value) + # if it is a coroutine, await it + if isinstance(result, Coroutine): + await result - def clear_session(self, session_id): - self.cache_service.delete(session_id) + async def clear_session(self, session_id): + result = self.cache_service.delete(session_id) + # if it is a coroutine, await it + if isinstance(result, Coroutine): + await result diff --git a/src/backend/langflow/services/session/utils.py b/src/backend/base/langflow/services/session/utils.py similarity index 100% rename from src/backend/langflow/services/session/utils.py rename to src/backend/base/langflow/services/session/utils.py diff --git a/src/backend/langflow/services/settings/__init__.py b/src/backend/base/langflow/services/settings/__init__.py similarity index 100% rename from src/backend/langflow/services/settings/__init__.py rename to src/backend/base/langflow/services/settings/__init__.py diff --git 
a/src/backend/langflow/services/settings/auth.py b/src/backend/base/langflow/services/settings/auth.py similarity index 81% rename from src/backend/langflow/services/settings/auth.py rename to src/backend/base/langflow/services/settings/auth.py index 8463d0781..103c96a40 100644 --- a/src/backend/langflow/services/settings/auth.py +++ b/src/backend/base/langflow/services/settings/auth.py @@ -1,32 +1,29 @@ import secrets from pathlib import Path -from typing import Optional +from typing import Literal -from langflow.services.settings.constants import ( - DEFAULT_SUPERUSER, - DEFAULT_SUPERUSER_PASSWORD, -) -from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file from loguru import logger from passlib.context import CryptContext -from pydantic import Field, validator +from pydantic import Field, SecretStr, validator from pydantic_settings import BaseSettings +from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD +from langflow.services.settings.utils import read_secret_from_file, write_secret_to_file + class AuthSettings(BaseSettings): # Login settings CONFIG_DIR: str - SECRET_KEY: str = Field( - default="", + SECRET_KEY: SecretStr = Field( + default=SecretStr(""), description="Secret key for JWT. 
If not provided, a random one will be generated.", frozen=False, ) ALGORITHM: str = "HS256" - ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 - REFRESH_TOKEN_EXPIRE_MINUTES: int = 60 * 12 * 7 + ACCESS_TOKEN_EXPIRE_SECONDS: int = 60 * 60 # 1 hour + REFRESH_TOKEN_EXPIRE_SECONDS: int = 60 * 60 * 24 * 7 # 7 days # API Key to execute /process endpoint - API_KEY_SECRET_KEY: Optional[str] = "b82818e0ad4ff76615c5721ee21004b07d84cd9b87ba4d9cb42374da134b841a" API_KEY_ALGORITHM: str = "HS256" API_V1_STR: str = "/api/v1" @@ -37,15 +34,15 @@ class AuthSettings(BaseSettings): SUPERUSER: str = DEFAULT_SUPERUSER SUPERUSER_PASSWORD: str = DEFAULT_SUPERUSER_PASSWORD - REFRESH_SAME_SITE: str = "none" + REFRESH_SAME_SITE: Literal["lax", "strict", "none"] = "none" """The SameSite attribute of the refresh token cookie.""" REFRESH_SECURE: bool = True """The Secure attribute of the refresh token cookie.""" REFRESH_HTTPONLY: bool = True """The HttpOnly attribute of the refresh token cookie.""" - ACCESS_SAME_SITE: str = "none" + ACCESS_SAME_SITE: Literal["lax", "strict", "none"] = "lax" """The SameSite attribute of the access token cookie.""" - ACCESS_SECURE: bool = True + ACCESS_SECURE: bool = False """The Secure attribute of the access token cookie.""" ACCESS_HTTPONLY: bool = False """The HttpOnly attribute of the access token cookie.""" @@ -89,9 +86,10 @@ class AuthSettings(BaseSettings): secret_key_path = Path(config_dir) / "secret_key" - if value: + if value and isinstance(value, SecretStr): logger.debug("Secret key provided") - write_secret_to_file(secret_key_path, value) + secret_value = value.get_secret_value() + write_secret_to_file(secret_key_path, secret_value) else: logger.debug("No secret key provided, generating a random one") @@ -107,4 +105,4 @@ class AuthSettings(BaseSettings): write_secret_to_file(secret_key_path, value) logger.debug("Saved secret key") - return value + return value if isinstance(value, SecretStr) else SecretStr(value) diff --git 
a/src/backend/langflow/services/settings/base.py b/src/backend/base/langflow/services/settings/base.py similarity index 98% rename from src/backend/langflow/services/settings/base.py rename to src/backend/base/langflow/services/settings/base.py index f68afb0f4..9c7ab21c4 100644 --- a/src/backend/langflow/services/settings/base.py +++ b/src/backend/base/langflow/services/settings/base.py @@ -38,7 +38,7 @@ class Settings(BaseSettings): DEV: bool = False DATABASE_URL: Optional[str] = None - CACHE_TYPE: str = "memory" + CACHE_TYPE: str = "async" REMOVE_API_KEYS: bool = False COMPONENTS_PATH: List[str] = [] LANGCHAIN_CACHE: str = "InMemoryCache" @@ -63,6 +63,10 @@ class Settings(BaseSettings): ] = "https://api.langflow.store/flows/trigger/ec611a61-8460-4438-b187-a4f65e5559d4" LIKE_WEBHOOK_URL: Optional[str] = "https://api.langflow.store/flows/trigger/64275852-ec00-45c1-984e-3bff814732da" + STORAGE_TYPE: str = "local" + + CELERY_ENABLED: bool = False + @validator("CONFIG_DIR", pre=True, allow_reuse=True) def set_langflow_dir(cls, value): if not value: diff --git a/src/backend/langflow/services/settings/constants.py b/src/backend/base/langflow/services/settings/constants.py similarity index 100% rename from src/backend/langflow/services/settings/constants.py rename to src/backend/base/langflow/services/settings/constants.py diff --git a/src/backend/langflow/services/settings/factory.py b/src/backend/base/langflow/services/settings/factory.py similarity index 99% rename from src/backend/langflow/services/settings/factory.py rename to src/backend/base/langflow/services/settings/factory.py index 713f13f82..a94e0abbe 100644 --- a/src/backend/langflow/services/settings/factory.py +++ b/src/backend/base/langflow/services/settings/factory.py @@ -1,6 +1,7 @@ from pathlib import Path -from langflow.services.settings.service import SettingsService + from langflow.services.factory import ServiceFactory +from langflow.services.settings.service import SettingsService class 
SettingsServiceFactory(ServiceFactory): diff --git a/src/backend/langflow/services/settings/service.py b/src/backend/base/langflow/services/settings/manager.py similarity index 99% rename from src/backend/langflow/services/settings/service.py rename to src/backend/base/langflow/services/settings/manager.py index a57a59eb8..f81c3f0c5 100644 --- a/src/backend/langflow/services/settings/service.py +++ b/src/backend/base/langflow/services/settings/manager.py @@ -1,9 +1,11 @@ +import os + +import yaml +from loguru import logger + from langflow.services.base import Service from langflow.services.settings.auth import AuthSettings from langflow.services.settings.base import Settings -from loguru import logger -import os -import yaml class SettingsService(Service): diff --git a/src/backend/base/langflow/services/settings/service.py b/src/backend/base/langflow/services/settings/service.py new file mode 100644 index 000000000..a66888924 --- /dev/null +++ b/src/backend/base/langflow/services/settings/service.py @@ -0,0 +1,44 @@ +import os + +import yaml +from loguru import logger + +from langflow.services.base import Service +from langflow.services.settings.auth import AuthSettings +from langflow.services.settings.base import Settings + + +class SettingsService(Service): + name = "settings_service" + + def __init__(self, settings: Settings, auth_settings: AuthSettings): + super().__init__() + self.settings: Settings = settings + self.auth_settings: AuthSettings = auth_settings + + @classmethod + def load_settings_from_yaml(cls, file_path: str) -> "SettingsService": + # Check if a string is a valid path or a file name + if "/" not in file_path: + # Get current path + current_path = os.path.dirname(os.path.abspath(__file__)) + + file_path = os.path.join(current_path, file_path) + + with open(file_path, "r") as f: + settings_dict = yaml.safe_load(f) + settings_dict = {k.upper(): v for k, v in settings_dict.items()} + + for key in settings_dict: + if key not in 
Settings.model_fields.keys(): + raise KeyError(f"Key {key} not found in settings") + logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") + + settings = Settings(**settings_dict) + if not settings.CONFIG_DIR: + raise ValueError("CONFIG_DIR must be set in settings") + + auth_settings = AuthSettings( + CONFIG_DIR=settings.CONFIG_DIR, + ) + return cls(settings, auth_settings) diff --git a/src/backend/langflow/services/settings/utils.py b/src/backend/base/langflow/services/settings/utils.py similarity index 100% rename from src/backend/langflow/services/settings/utils.py rename to src/backend/base/langflow/services/settings/utils.py diff --git a/src/backend/base/langflow/services/socket/__init__.py b/src/backend/base/langflow/services/socket/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/services/socket/factory.py b/src/backend/base/langflow/services/socket/factory.py new file mode 100644 index 000000000..3ea6bb0ba --- /dev/null +++ b/src/backend/base/langflow/services/socket/factory.py @@ -0,0 +1,17 @@ +from typing import TYPE_CHECKING + +from langflow.services.factory import ServiceFactory +from langflow.services.socket.service import SocketIOService + +if TYPE_CHECKING: + from langflow.services.cache.service import CacheService + + +class SocketIOFactory(ServiceFactory): + def __init__(self): + super().__init__( + service_class=SocketIOService, + ) + + def create(self, cache_service: "CacheService"): + return SocketIOService(cache_service) diff --git a/src/backend/base/langflow/services/socket/service.py b/src/backend/base/langflow/services/socket/service.py new file mode 100644 index 000000000..45cfc5fbc --- /dev/null +++ b/src/backend/base/langflow/services/socket/service.py @@ -0,0 +1,86 @@ +from typing import TYPE_CHECKING, Any + +import socketio # type: ignore +from loguru import logger + +from langflow.services.base import Service +from langflow.services.deps import get_chat_service +from 
langflow.services.socket.utils import build_vertex, get_vertices + +if TYPE_CHECKING: + from langflow.services.cache.service import CacheService + + +class SocketIOService(Service): + name = "socket_service" + + def __init__(self, cache_service: "CacheService"): + self.cache_service = cache_service + + def init(self, sio: socketio.AsyncServer): + # Registering event handlers + self.sio = sio + if self.sio: + self.sio.event(self.connect) + self.sio.event(self.disconnect) + self.sio.on("message")(self.message) + self.sio.on("get_vertices")(self.on_get_vertices) + self.sio.on("build_vertex")(self.on_build_vertex) + self.sessions = {} # type: dict[str, dict] + + async def emit_error(self, sid, error): + await self.sio.emit("error", to=sid, data=error) + + async def connect(self, sid, environ): + logger.info(f"Socket connected: {sid}") + self.sessions[sid] = environ + + async def disconnect(self, sid): + logger.info(f"Socket disconnected: {sid}") + self.sessions.pop(sid, None) + + async def message(self, sid, data=None): + # Logic for handling messages + await self.emit_message(to=sid, data=data or {"foo": "bar", "baz": [1, 2, 3]}) + + async def emit_message(self, to, data): + # Abstracting sio.emit + await self.sio.emit("message", to=to, data=data) + + async def emit_token(self, to, data): + await self.sio.emit("token", to=to, data=data) + + async def on_get_vertices(self, sid, flow_id): + await get_vertices(self.sio, sid, flow_id, get_chat_service()) + + async def on_build_vertex(self, sid, flow_id, vertex_id, tweaks, inputs): + await build_vertex( + sio=self.sio, + sid=sid, + flow_id=flow_id, + vertex_id=vertex_id, + tweaks=tweaks, + inputs=inputs, + get_cache=self.get_cache, + set_cache=self.set_cache, + ) + + def get_cache(self, sid: str) -> Any: + """ + Get the cache for a client. + """ + return self.cache_service.get(sid) + + def set_cache(self, sid: str, build_result: Any) -> bool: + """ + Set the cache for a client. 
+ """ + # client_id is the flow id but that already exists in the cache + # so we need to change it to something else + + result_dict = { + "result": build_result, + "type": type(build_result), + } + self.cache_service.upsert(sid, result_dict) + return sid in self.cache_service diff --git a/src/backend/base/langflow/services/socket/utils.py b/src/backend/base/langflow/services/socket/utils.py new file mode 100644 index 000000000..c1f012e18 --- /dev/null +++ b/src/backend/base/langflow/services/socket/utils.py @@ -0,0 +1,103 @@ +import time +from typing import Callable + +import socketio # type: ignore +from sqlmodel import select + +from langflow.api.utils import format_elapsed_time +from langflow.api.v1.schemas import ResultDataResponse, VertexBuildResponse +from langflow.graph.graph.base import Graph +from langflow.graph.vertex.base import Vertex +from langflow.services.database.models.flow.model import Flow +from langflow.services.deps import get_session +from langflow.services.monitor.utils import log_vertex_build + + +def set_socketio_server(socketio_server): + from langflow.services.deps import get_socket_service + + socket_service = get_socket_service() + socket_service.init(socketio_server) + + +async def get_vertices(sio, sid, flow_id, chat_service): + try: + session = get_session() + flow: Flow = session.exec(select(Flow).where(Flow.id == flow_id)).first() + if not flow or not flow.data: + await sio.emit("error", data="Invalid flow ID", to=sid) + return + + graph = Graph.from_payload(flow.data) + chat_service.set_cache(flow_id, graph) + vertices = graph.layered_topological_sort() + + # Emit the vertices to the client + await sio.emit("vertices_order", data=vertices, to=sid) + + except Exception as exc: + await sio.emit("error", data=str(exc), to=sid) + + +async def build_vertex( + sio: socketio.AsyncServer, + sid: str, + flow_id: str, + vertex_id: str, + get_cache: Callable, + set_cache: Callable, + tweaks=None, + inputs=None, +): + try: + cache = 
get_cache(flow_id) + graph = cache.get("result") + + if not isinstance(graph, Graph): + await sio.emit("error", data="Invalid graph", to=sid) + return + + vertex = graph.get_vertex(vertex_id) + if not vertex: + await sio.emit("error", data="Invalid vertex", to=sid) + return + start_time = time.perf_counter() + try: + if isinstance(vertex, Vertex) or not vertex._built: + await vertex.build(user_id=None, session_id=sid) + params = vertex._built_object_repr() + valid = True + result_dict = vertex.get_built_result() + # We need to set the artifacts to pass information + # to the frontend + vertex.set_artifacts() + artifacts = vertex.artifacts + timedelta = time.perf_counter() - start_time + duration = format_elapsed_time(timedelta) + result_dict = ResultDataResponse( + results=result_dict, + artifacts=artifacts, + duration=duration, + timedelta=timedelta, + ) + except Exception as exc: + params = str(exc) + valid = False + result_dict = ResultDataResponse(results={}) + artifacts = {} + set_cache(flow_id, graph) + await log_vertex_build( + flow_id=flow_id, + vertex_id=vertex_id, + valid=valid, + params=params, + data=result_dict, + artifacts=artifacts, + ) + + # Emit the vertex build response + response = VertexBuildResponse(valid=valid, params=params, id=vertex.id, data=result_dict) + await sio.emit("vertex_build", data=response.model_dump(), to=sid) + + except Exception as exc: + await sio.emit("error", data=str(exc), to=sid) diff --git a/src/backend/base/langflow/services/state/__init__.py b/src/backend/base/langflow/services/state/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/services/state/factory.py b/src/backend/base/langflow/services/state/factory.py new file mode 100644 index 000000000..e6c5ee740 --- /dev/null +++ b/src/backend/base/langflow/services/state/factory.py @@ -0,0 +1,13 @@ +from langflow.services.factory import ServiceFactory +from langflow.services.settings.service import SettingsService +from 
langflow.services.state.service import InMemoryStateService + + +class StateServiceFactory(ServiceFactory): + def __init__(self): + super().__init__(InMemoryStateService) + + def create(self, settings_service: SettingsService): + return InMemoryStateService( + settings_service, + ) diff --git a/src/backend/base/langflow/services/state/service.py b/src/backend/base/langflow/services/state/service.py new file mode 100644 index 000000000..b56f95148 --- /dev/null +++ b/src/backend/base/langflow/services/state/service.py @@ -0,0 +1,74 @@ +from collections import defaultdict +from threading import Lock +from typing import Callable + +from loguru import logger + +from langflow.services.base import Service +from langflow.services.settings.service import SettingsService + + +class StateService(Service): + name = "state_service" + + def append_state(self, key, new_state, run_id: str): + raise NotImplementedError + + def update_state(self, key, new_state, run_id: str): + raise NotImplementedError + + def get_state(self, key, run_id: str): + raise NotImplementedError + + def subscribe(self, key, observer: Callable): + raise NotImplementedError + + def notify_observers(self, key, new_state): + raise NotImplementedError + + +class InMemoryStateService(StateService): + def __init__(self, settings_service: SettingsService): + self.settings_service = settings_service + self.states: dict = {} + self.observers: dict = defaultdict(list) + self.lock = Lock() + + def append_state(self, key, new_state, run_id: str): + with self.lock: + if run_id not in self.states: + self.states[run_id] = {} + if key not in self.states[run_id]: + self.states[run_id][key] = [] + elif not isinstance(self.states[run_id][key], list): + self.states[run_id][key] = [self.states[run_id][key]] + self.states[run_id][key].append(new_state) + self.notify_append_observers(key, new_state) + + def update_state(self, key, new_state, run_id: str): + with self.lock: + if run_id not in self.states: + self.states[run_id] = 
{} + self.states[run_id][key] = new_state + self.notify_observers(key, new_state) + + def get_state(self, key, run_id: str): + with self.lock: + return self.states.get(run_id, {}).get(key, "") + + def subscribe(self, key, observer: Callable): + with self.lock: + if observer not in self.observers[key]: + self.observers[key].append(observer) + + def notify_observers(self, key, new_state): + for callback in self.observers[key]: + callback(key, new_state, append=False) + + def notify_append_observers(self, key, new_state): + for callback in self.observers[key]: + try: + callback(key, new_state, append=True) + except Exception as e: + logger.error(f"Error in observer {callback} for key {key}: {e}") + logger.warning("Callbacks not implemented yet") diff --git a/src/backend/base/langflow/services/storage/__init__.py b/src/backend/base/langflow/services/storage/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/services/storage/constants.py b/src/backend/base/langflow/services/storage/constants.py new file mode 100644 index 000000000..ddec29f3f --- /dev/null +++ b/src/backend/base/langflow/services/storage/constants.py @@ -0,0 +1,28 @@ +EXTENSION_TO_CONTENT_TYPE = { + "json": "application/json", + "txt": "text/plain", + "csv": "text/csv", + "html": "text/html", + "pdf": "application/pdf", + "png": "image/png", + "jpg": "image/jpeg", + "jpeg": "image/jpeg", + "gif": "image/gif", + "svg": "image/svg+xml", + "mp3": "audio/mpeg", + "wav": "audio/wav", + "mp4": "video/mp4", + "webm": "video/webm", + "zip": "application/zip", + "tar": "application/x-tar", + "gz": "application/gzip", + "doc": "application/msword", + "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + "xls": "application/vnd.ms-excel", + "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + "ppt": "application/vnd.ms-powerpoint", + "pptx": 
"application/vnd.openxmlformats-officedocument.presentationml.presentation", + "xml": "application/xml", + "yaml": "application/x-yaml", + "yml": "application/x-yaml", +} diff --git a/src/backend/base/langflow/services/storage/factory.py b/src/backend/base/langflow/services/storage/factory.py new file mode 100644 index 000000000..1b2baf050 --- /dev/null +++ b/src/backend/base/langflow/services/storage/factory.py @@ -0,0 +1,29 @@ +from loguru import logger + +from langflow.services.factory import ServiceFactory +from langflow.services.session.service import SessionService +from langflow.services.settings.service import SettingsService +from langflow.services.storage.service import StorageService + + +class StorageServiceFactory(ServiceFactory): + def __init__(self): + super().__init__( + StorageService, + ) + + def create(self, session_service: SessionService, settings_service: SettingsService): + storage_type = settings_service.settings.STORAGE_TYPE + if storage_type.lower() == "local": + from .local import LocalStorageService + + return LocalStorageService(session_service, settings_service) + elif storage_type.lower() == "s3": + from .s3 import S3StorageService + + return S3StorageService(session_service, settings_service) + else: + logger.warning(f"Storage type {storage_type} not supported. 
Using local storage.") + from .local import LocalStorageService + + return LocalStorageService(session_service, settings_service) diff --git a/src/backend/base/langflow/services/storage/local.py b/src/backend/base/langflow/services/storage/local.py new file mode 100644 index 000000000..815059857 --- /dev/null +++ b/src/backend/base/langflow/services/storage/local.py @@ -0,0 +1,95 @@ +from pathlib import Path + +from loguru import logger + +from .service import StorageService + + +class LocalStorageService(StorageService): + """A service class for handling local storage operations without aiofiles.""" + + def __init__(self, session_service, settings_service): + """Initialize the local storage service with session and settings services.""" + super().__init__(session_service, settings_service) + self.data_dir = Path(settings_service.settings.CONFIG_DIR) + self.set_ready() + + def build_full_path(self, flow_id: str, file_name: str) -> str: + """Build the full path of a file in the local storage.""" + return str(self.data_dir / flow_id / file_name) + + async def save_file(self, flow_id: str, file_name: str, data: bytes): + """ + Save a file in the local storage. + + :param flow_id: The identifier for the flow. + :param file_name: The name of the file to be saved. + :param data: The byte content of the file. + :raises FileNotFoundError: If the specified flow does not exist. + :raises IsADirectoryError: If the file name is a directory. + :raises PermissionError: If there is no permission to write the file. 
+ """ + folder_path = self.data_dir / flow_id + folder_path.mkdir(parents=True, exist_ok=True) + file_path = folder_path / file_name + + try: + with open(file_path, "wb") as f: + f.write(data) + logger.info(f"File {file_name} saved successfully in flow {flow_id}.") + except Exception as e: + logger.error(f"Error saving file {file_name} in flow {flow_id}: {e}") + raise e + + async def get_file(self, flow_id: str, file_name: str) -> bytes: + """ + Retrieve a file from the local storage. + + :param flow_id: The identifier for the flow. + :param file_name: The name of the file to be retrieved. + :return: The byte content of the file. + :raises FileNotFoundError: If the file does not exist. + """ + file_path = self.data_dir / flow_id / file_name + if not file_path.exists(): + logger.warning(f"File {file_name} not found in flow {flow_id}.") + raise FileNotFoundError(f"File {file_name} not found in flow {flow_id}") + + with open(file_path, "rb") as f: + logger.info(f"File {file_name} retrieved successfully from flow {flow_id}.") + return f.read() + + async def list_files(self, flow_id: str): + """ + List all files in a specified flow. + + :param flow_id: The identifier for the flow. + :return: A list of file names. + :raises FileNotFoundError: If the flow directory does not exist. + """ + folder_path = self.data_dir / flow_id + if not folder_path.exists() or not folder_path.is_dir(): + logger.warning(f"Flow {flow_id} directory does not exist.") + raise FileNotFoundError(f"Flow {flow_id} directory does not exist.") + + files = [file.name for file in folder_path.iterdir() if file.is_file()] + logger.info(f"Listed {len(files)} files in flow {flow_id}.") + return files + + async def delete_file(self, flow_id: str, file_name: str): + """ + Delete a file from the local storage. + + :param flow_id: The identifier for the flow. + :param file_name: The name of the file to be deleted. 
+ """ + file_path = self.data_dir / flow_id / file_name + if file_path.exists(): + file_path.unlink() + logger.info(f"File {file_name} deleted successfully from flow {flow_id}.") + else: + logger.warning(f"Attempted to delete non-existent file {file_name} in flow {flow_id}.") + + def teardown(self): + """Perform any cleanup operations when the service is being torn down.""" + pass # No specific teardown actions required for local diff --git a/src/backend/base/langflow/services/storage/s3.py b/src/backend/base/langflow/services/storage/s3.py new file mode 100644 index 000000000..4426f377c --- /dev/null +++ b/src/backend/base/langflow/services/storage/s3.py @@ -0,0 +1,89 @@ +import boto3 # type: ignore +from botocore.exceptions import ClientError, NoCredentialsError # type: ignore +from loguru import logger + +from .service import StorageService + + +class S3StorageService(StorageService): + """A service class for handling operations with AWS S3 storage.""" + + async def __init__(self, session_service, settings_service): + """Initialize the S3 storage service with session and settings services.""" + super().__init__(session_service, settings_service) + self.bucket = "langflow" + self.s3_client = boto3.client("s3") + self.set_ready() + + async def save_file(self, folder: str, file_name: str, data): + """ + Save a file to the S3 bucket. + + :param folder: The folder in the bucket to save the file. + :param file_name: The name of the file to be saved. + :param data: The byte content of the file. + :raises Exception: If an error occurs during file saving. 
+ """ + try: + self.s3_client.put_object(Bucket=self.bucket, Key=f"{folder}/{file_name}", Body=data) + logger.info(f"File {file_name} saved successfully in folder {folder}.") + except NoCredentialsError: + logger.error("Credentials not available for AWS S3.") + raise + except ClientError as e: + logger.error(f"Error saving file {file_name} in folder {folder}: {e}") + raise + + async def get_file(self, folder: str, file_name: str): + """ + Retrieve a file from the S3 bucket. + + :param folder: The folder in the bucket where the file is stored. + :param file_name: The name of the file to be retrieved. + :return: The byte content of the file. + :raises Exception: If an error occurs during file retrieval. + """ + try: + response = self.s3_client.get_object(Bucket=self.bucket, Key=f"{folder}/{file_name}") + logger.info(f"File {file_name} retrieved successfully from folder {folder}.") + return response["Body"].read() + except ClientError as e: + logger.error(f"Error retrieving file {file_name} from folder {folder}: {e}") + raise + + async def list_files(self, folder: str): + """ + List all files in a specified folder of the S3 bucket. + + :param folder: The folder in the bucket to list files from. + :return: A list of file names. + :raises Exception: If an error occurs during file listing. + """ + try: + response = self.s3_client.list_objects_v2(Bucket=self.bucket, Prefix=folder) + files = [item["Key"] for item in response.get("Contents", []) if "/" not in item["Key"][len(folder) :]] + logger.info(f"{len(files)} files listed in folder {folder}.") + return files + except ClientError as e: + logger.error(f"Error listing files in folder {folder}: {e}") + raise + + async def delete_file(self, folder: str, file_name: str): + """ + Delete a file from the S3 bucket. + + :param folder: The folder in the bucket where the file is stored. + :param file_name: The name of the file to be deleted. + :raises Exception: If an error occurs during file deletion. 
+ """ + try: + self.s3_client.delete_object(Bucket=self.bucket, Key=f"{folder}/{file_name}") + logger.info(f"File {file_name} deleted successfully from folder {folder}.") + except ClientError as e: + logger.error(f"Error deleting file {file_name} from folder {folder}: {e}") + raise + + async def teardown(self): + """Perform any cleanup operations when the service is being torn down.""" + # No specific teardown actions required for S3 storage at the moment. + pass diff --git a/src/backend/base/langflow/services/storage/service.py b/src/backend/base/langflow/services/storage/service.py new file mode 100644 index 000000000..ac226bc31 --- /dev/null +++ b/src/backend/base/langflow/services/storage/service.py @@ -0,0 +1,42 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING + +from langflow.services.base import Service + +if TYPE_CHECKING: + from langflow.services.session.service import SessionService + from langflow.services.settings.service import SettingsService + + +class StorageService(Service): + name = "storage_service" + + def __init__(self, session_service: "SessionService", settings_service: "SettingsService"): + self.settings_service = settings_service + self.session_service = session_service + self.set_ready() + + def build_full_path(self, flow_id: str, file_name: str) -> str: + raise NotImplementedError + + def set_ready(self): + self.ready = True + + @abstractmethod + async def save_file(self, flow_id: str, file_name: str, data) -> None: + raise NotImplementedError + + @abstractmethod + async def get_file(self, flow_id: str, file_name: str) -> bytes: + raise NotImplementedError + + @abstractmethod + async def list_files(self, flow_id: str) -> list[str]: + raise NotImplementedError + + @abstractmethod + async def delete_file(self, flow_id: str, file_name: str) -> bool: + raise NotImplementedError + + def teardown(self): + raise NotImplementedError diff --git a/src/backend/base/langflow/services/storage/utils.py 
b/src/backend/base/langflow/services/storage/utils.py new file mode 100644 index 000000000..a5515171b --- /dev/null +++ b/src/backend/base/langflow/services/storage/utils.py @@ -0,0 +1,5 @@ +from langflow.services.storage.constants import EXTENSION_TO_CONTENT_TYPE + + +def build_content_type_from_extension(extension: str): + return EXTENSION_TO_CONTENT_TYPE.get(extension.lower(), "application/octet-stream") diff --git a/src/backend/base/langflow/services/store/__init__.py b/src/backend/base/langflow/services/store/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/store/exceptions.py b/src/backend/base/langflow/services/store/exceptions.py similarity index 100% rename from src/backend/langflow/services/store/exceptions.py rename to src/backend/base/langflow/services/store/exceptions.py diff --git a/src/backend/langflow/services/store/factory.py b/src/backend/base/langflow/services/store/factory.py similarity index 99% rename from src/backend/langflow/services/store/factory.py rename to src/backend/base/langflow/services/store/factory.py index a25ad78c7..2bdc918bd 100644 --- a/src/backend/langflow/services/store/factory.py +++ b/src/backend/base/langflow/services/store/factory.py @@ -1,6 +1,7 @@ from typing import TYPE_CHECKING -from langflow.services.store.service import StoreService + from langflow.services.factory import ServiceFactory +from langflow.services.store.service import StoreService if TYPE_CHECKING: from langflow.services.settings.service import SettingsService diff --git a/src/backend/langflow/services/store/schema.py b/src/backend/base/langflow/services/store/schema.py similarity index 100% rename from src/backend/langflow/services/store/schema.py rename to src/backend/base/langflow/services/store/schema.py diff --git a/src/backend/langflow/services/store/service.py b/src/backend/base/langflow/services/store/service.py similarity index 100% rename from src/backend/langflow/services/store/service.py 
rename to src/backend/base/langflow/services/store/service.py diff --git a/src/backend/langflow/services/store/utils.py b/src/backend/base/langflow/services/store/utils.py similarity index 100% rename from src/backend/langflow/services/store/utils.py rename to src/backend/base/langflow/services/store/utils.py diff --git a/src/backend/base/langflow/services/task/__init__.py b/src/backend/base/langflow/services/task/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/services/task/backends/__init__.py b/src/backend/base/langflow/services/task/backends/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/task/backends/anyio.py b/src/backend/base/langflow/services/task/backends/anyio.py similarity index 100% rename from src/backend/langflow/services/task/backends/anyio.py rename to src/backend/base/langflow/services/task/backends/anyio.py diff --git a/src/backend/langflow/services/task/backends/base.py b/src/backend/base/langflow/services/task/backends/base.py similarity index 95% rename from src/backend/langflow/services/task/backends/base.py rename to src/backend/base/langflow/services/task/backends/base.py index ccbd9273b..93fbfd858 100644 --- a/src/backend/langflow/services/task/backends/base.py +++ b/src/backend/base/langflow/services/task/backends/base.py @@ -3,6 +3,8 @@ from typing import Any, Callable class TaskBackend(ABC): + name: str + @abstractmethod def launch_task(self, task_func: Callable[..., Any], *args: Any, **kwargs: Any): pass diff --git a/src/backend/langflow/services/task/backends/celery.py b/src/backend/base/langflow/services/task/backends/celery.py similarity index 99% rename from src/backend/langflow/services/task/backends/celery.py rename to src/backend/base/langflow/services/task/backends/celery.py index f23374549..cfb17ae3b 100644 --- a/src/backend/langflow/services/task/backends/celery.py +++ 
b/src/backend/base/langflow/services/task/backends/celery.py @@ -1,5 +1,8 @@ from typing import Any, Callable + from celery.result import AsyncResult # type: ignore + + from langflow.services.task.backends.base import TaskBackend from langflow.worker import celery_app diff --git a/src/backend/langflow/services/task/factory.py b/src/backend/base/langflow/services/task/factory.py similarity index 100% rename from src/backend/langflow/services/task/factory.py rename to src/backend/base/langflow/services/task/factory.py index e87eecc94..937f390ae 100644 --- a/src/backend/langflow/services/task/factory.py +++ b/src/backend/base/langflow/services/task/factory.py @@ -1,5 +1,5 @@ -from langflow.services.task.service import TaskService from langflow.services.factory import ServiceFactory +from langflow.services.task.service import TaskService class TaskServiceFactory(ServiceFactory): diff --git a/src/backend/langflow/services/task/service.py b/src/backend/base/langflow/services/task/service.py similarity index 76% rename from src/backend/langflow/services/task/service.py rename to src/backend/base/langflow/services/task/service.py index 3f7a81f2c..487b507cd 100644 --- a/src/backend/langflow/services/task/service.py +++ b/src/backend/base/langflow/services/task/service.py @@ -1,11 +1,14 @@ -from typing import Any, Callable, Coroutine, Union +from typing import TYPE_CHECKING, Any, Callable, Coroutine + +from loguru import logger from langflow.services.base import Service from langflow.services.task.backends.anyio import AnyIOBackend from langflow.services.task.backends.base import TaskBackend from langflow.services.task.utils import get_celery_worker_status -from langflow.utils.logger import configure -from loguru import logger + +if TYPE_CHECKING: + from langflow.services.settings.service import SettingsService def check_celery_availability(): @@ -20,28 +23,31 @@ def check_celery_availability(): return status -try: - configure() - status = check_celery_availability() - - 
USE_CELERY = status.get("availability") is not None -except ImportError: - USE_CELERY = False - - class TaskService(Service): name = "task_service" - def __init__(self): - self.backend = self.get_backend() + def __init__(self, settings_service: "SettingsService"): + self.settings_service = settings_service + try: + if self.settings_service.settings.CELERY_ENABLED: + USE_CELERY = True + status = check_celery_availability() + + USE_CELERY = status.get("availability") is not None + else: + USE_CELERY = False + except ImportError: + USE_CELERY = False + self.use_celery = USE_CELERY + self.backend = self.get_backend() @property def backend_name(self) -> str: return self.backend.name def get_backend(self) -> TaskBackend: - if USE_CELERY: + if self.use_celery: from langflow.services.task.backends.celery import CeleryBackend logger.debug("Using Celery backend") @@ -74,5 +80,5 @@ class TaskService(Service): task = self.backend.launch_task(task_func, *args, **kwargs) return await task if isinstance(task, Coroutine) else task - def get_task(self, task_id: Union[int, str]) -> Any: + def get_task(self, task_id: str) -> Any: return self.backend.get_task(task_id) diff --git a/src/backend/langflow/services/task/utils.py b/src/backend/base/langflow/services/task/utils.py similarity index 100% rename from src/backend/langflow/services/task/utils.py rename to src/backend/base/langflow/services/task/utils.py diff --git a/src/backend/langflow/services/utils.py b/src/backend/base/langflow/services/utils.py similarity index 71% rename from src/backend/langflow/services/utils.py rename to src/backend/base/langflow/services/utils.py index fcb077538..10c1a359e 100644 --- a/src/backend/langflow/services/utils.py +++ b/src/backend/base/langflow/services/utils.py @@ -1,59 +1,41 @@ +import importlib +import inspect + from loguru import logger from sqlmodel import Session, select from langflow.services.auth.utils import create_super_user, verify_password from langflow.services.database.utils 
import initialize_database +from langflow.services.factory import ServiceFactory from langflow.services.manager import service_manager from langflow.services.schema import ServiceType -from langflow.services.settings.constants import ( - DEFAULT_SUPERUSER, - DEFAULT_SUPERUSER_PASSWORD, -) +from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD +from langflow.services.socket.utils import set_socketio_server from .deps import get_db_service, get_session, get_settings_service -def get_factories_and_deps(): - from langflow.services.auth import factory as auth_factory - from langflow.services.cache import factory as cache_factory - from langflow.services.chat import factory as chat_factory - from langflow.services.credentials import factory as credentials_factory - from langflow.services.database import factory as database_factory - from langflow.services.plugins import factory as plugins_factory - from langflow.services.session import ( - factory as session_service_factory, - ) # type: ignore - from langflow.services.settings import factory as settings_factory - from langflow.services.store import factory as store_factory - from langflow.services.task import factory as task_factory +def get_factories(): + service_names = [ServiceType(service_type).value.replace("_service", "") for service_type in ServiceType] + base_module = "langflow.services" + factories = [] - return [ - (settings_factory.SettingsServiceFactory(), []), - ( - auth_factory.AuthServiceFactory(), - [ServiceType.SETTINGS_SERVICE], - ), - ( - database_factory.DatabaseServiceFactory(), - [ServiceType.SETTINGS_SERVICE], - ), - ( - cache_factory.CacheServiceFactory(), - [ServiceType.SETTINGS_SERVICE], - ), - (chat_factory.ChatServiceFactory(), []), - (task_factory.TaskServiceFactory(), []), - ( - session_service_factory.SessionServiceFactory(), - [ServiceType.CACHE_SERVICE], - ), - (plugins_factory.PluginServiceFactory(), [ServiceType.SETTINGS_SERVICE]), - 
(store_factory.StoreServiceFactory(), [ServiceType.SETTINGS_SERVICE]), - ( - credentials_factory.CredentialServiceFactory(), - [ServiceType.SETTINGS_SERVICE], - ), - ] + for name in service_names: + try: + module_name = f"{base_module}.{name}.factory" + module = importlib.import_module(module_name) + + # Find all classes in the module that are subclasses of ServiceFactory + for name, obj in inspect.getmembers(module, inspect.isclass): + if issubclass(obj, ServiceFactory) and obj is not ServiceFactory: + factories.append(obj()) + break + + except Exception as exc: + logger.exception(exc) + raise RuntimeError(f"Could not initialize services. Please check your settings. Error in {name}.") from exc + + return factories def get_or_create_super_user(session: Session, username, password, is_default): @@ -182,30 +164,29 @@ def initialize_session_service(): Initialize the session manager. """ from langflow.services.cache import factory as cache_factory - from langflow.services.session import ( - factory as session_service_factory, - ) # type: ignore + from langflow.services.session import factory as session_service_factory # type: ignore initialize_settings_service() - service_manager.register_factory(cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE]) + service_manager.register_factory( + cache_factory.CacheServiceFactory(), + ) service_manager.register_factory( session_service_factory.SessionServiceFactory(), - dependencies=[ServiceType.CACHE_SERVICE], ) -def initialize_services(fix_migration: bool = False): +def initialize_services(fix_migration: bool = False, socketio_server=None): """ Initialize all the services needed. """ - for factory, dependencies in get_factories_and_deps(): + for factory in get_factories(): try: - service_manager.register_factory(factory, dependencies=dependencies) + service_manager.register_factory(factory) except Exception as exc: logger.exception(exc) - raise RuntimeError("Could not initialize services. 
Please check your settings.") from exc + logger.error(f"Error initializing {factory}: {exc}") # Test cache connection service_manager.get(ServiceType.CACHE_SERVICE) @@ -221,3 +202,6 @@ def initialize_services(fix_migration: bool = False): except Exception as exc: logger.error(f"Error migrating flows: {exc}") raise RuntimeError("Error migrating flows") from exc + + # Initialize the SocketIO service + set_socketio_server(socketio_server) diff --git a/src/backend/base/langflow/services/variable/__init__.py b/src/backend/base/langflow/services/variable/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/credentials/factory.py b/src/backend/base/langflow/services/variable/factory.py similarity index 55% rename from src/backend/langflow/services/credentials/factory.py rename to src/backend/base/langflow/services/variable/factory.py index c44a43da4..aac384807 100644 --- a/src/backend/langflow/services/credentials/factory.py +++ b/src/backend/base/langflow/services/variable/factory.py @@ -1,15 +1,15 @@ from typing import TYPE_CHECKING -from langflow.services.credentials.service import CredentialService from langflow.services.factory import ServiceFactory +from langflow.services.variable.service import VariableService if TYPE_CHECKING: from langflow.services.settings.service import SettingsService -class CredentialServiceFactory(ServiceFactory): +class VariableServiceFactory(ServiceFactory): def __init__(self): - super().__init__(CredentialService) + super().__init__(VariableService) def create(self, settings_service: "SettingsService"): - return CredentialService(settings_service) + return VariableService(settings_service) diff --git a/src/backend/base/langflow/services/variable/service.py b/src/backend/base/langflow/services/variable/service.py new file mode 100644 index 000000000..23d1cd806 --- /dev/null +++ b/src/backend/base/langflow/services/variable/service.py @@ -0,0 +1,66 @@ +from typing import TYPE_CHECKING, 
Optional, Union +from uuid import UUID + +from fastapi import Depends +from sqlmodel import Session, select + +from langflow.services.auth import utils as auth_utils +from langflow.services.base import Service +from langflow.services.database.models.variable.model import Variable +from langflow.services.deps import get_session + +if TYPE_CHECKING: + from langflow.services.settings.service import SettingsService + + +class VariableService(Service): + name = "variable_service" + + def __init__(self, settings_service: "SettingsService"): + self.settings_service = settings_service + + def get_variable(self, user_id: Union[UUID, str], name: str, session: Session = Depends(get_session)) -> str: + # we get the credential from the database + # credential = session.query(Variable).filter(Variable.user_id == user_id, Variable.name == name).first() + variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() + # we decrypt the value + if not variable or not variable.value: + raise ValueError(f"{name} variable not found.") + decrypted = auth_utils.decrypt_api_key(variable.value, settings_service=self.settings_service) + return decrypted + + def list_variables(self, user_id: Union[UUID, str], session: Session = Depends(get_session)) -> list[Optional[str]]: + variables = session.exec(select(Variable).where(Variable.user_id == user_id)).all() + return [variable.name for variable in variables] + + def update_variable( + self, user_id: Union[UUID, str], name: str, value: str, session: Session = Depends(get_session) + ): + variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() + if not variable: + raise ValueError(f"{name} variable not found.") + encrypted = auth_utils.encrypt_api_key(value, settings_service=self.settings_service) + variable.value = encrypted + session.add(variable) + session.commit() + session.refresh(variable) + return variable + + def delete_variable(self, 
user_id: Union[UUID, str], name: str, session: Session = Depends(get_session)): + variable = session.exec(select(Variable).where(Variable.user_id == user_id, Variable.name == name)).first() + if not variable: + raise ValueError(f"{name} variable not found.") + session.delete(variable) + session.commit() + return variable + + def create_variable( + self, user_id: Union[UUID, str], name: str, value: str, session: Session = Depends(get_session) + ): + variable = Variable( + user_id=user_id, name=name, value=auth_utils.encrypt_api_key(value, settings_service=self.settings_service) + ) + session.add(variable) + session.commit() + session.refresh(variable) + return variable diff --git a/src/backend/langflow/settings.py b/src/backend/base/langflow/settings.py similarity index 100% rename from src/backend/langflow/settings.py rename to src/backend/base/langflow/settings.py diff --git a/src/backend/base/langflow/template/__init__.py b/src/backend/base/langflow/template/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/base/langflow/template/field/__init__.py b/src/backend/base/langflow/template/field/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/template/field/base.py b/src/backend/base/langflow/template/field/base.py similarity index 64% rename from src/backend/langflow/template/field/base.py rename to src/backend/base/langflow/template/field/base.py index 798d07b55..c68a5c476 100644 --- a/src/backend/langflow/template/field/base.py +++ b/src/backend/base/langflow/template/field/base.py @@ -1,11 +1,13 @@ from typing import Any, Callable, Optional, Union +from pydantic import BaseModel, ConfigDict, Field, field_serializer, field_validator, model_serializer, model_validator + from langflow.field_typing.range_spec import RangeSpec -from pydantic import BaseModel, ConfigDict, Field, field_serializer class TemplateField(BaseModel): model_config = ConfigDict() + field_type: str = 
Field(default="str", serialization_alias="type") """The type of field this is. Default is a string.""" @@ -28,7 +30,7 @@ class TemplateField(BaseModel): """The value of the field. Default is None.""" file_types: list[str] = Field(default=[], serialization_alias="fileTypes") - """List of file types associated with the field. Default is an empty list. (duplicate)""" + """List of file types associated with the field . Default is an empty list.""" file_path: Optional[str] = "" """The file path of the field if it is a file. Defaults to None.""" @@ -57,18 +59,45 @@ class TemplateField(BaseModel): info: Optional[str] = "" """Additional information about the field to be shown in the tooltip. Defaults to an empty string.""" - refresh: Optional[bool] = None - """Specifies if the field should be refreshed. Defaults to False.""" + real_time_refresh: Optional[bool] = None + """Specifies if the field should have real time refresh. `refresh_button` must be False. Defaults to None.""" + + refresh_button: Optional[bool] = None + """Specifies if the field should have a refresh button. Defaults to False.""" + refresh_button_text: Optional[str] = None + """Specifies the text for the refresh button. Defaults to None.""" range_spec: Optional[RangeSpec] = Field(default=None, serialization_alias="rangeSpec") """Range specification for the field. Defaults to None.""" - title_case: bool = True + load_from_db: bool = False + """Specifies if the field should be loaded from the database. Defaults to False.""" + title_case: bool = False """Specifies if the field should be displayed in title case. 
Defaults to True.""" def to_dict(self): return self.model_dump(by_alias=True, exclude_none=True) + @model_serializer(mode="wrap") + def serialize_model(self, handler): + result = handler(self) + # If the field is str, we add the Text input type + if self.field_type in ["str", "Text"]: + if "input_types" not in result: + result["input_types"] = ["Text"] + if self.field_type == "Text": + result["type"] = "str" + else: + result["type"] = self.field_type + return result + + @model_validator(mode="after") + def validate_model(self): + # if field_type is int, we need to set the range_spec + if self.field_type == "int" and self.range_spec is not None: + self.range_spec = RangeSpec.set_step_type("int", self.range_spec) + return self + @field_serializer("file_path") def serialize_file_path(self, value): return value if self.field_type == "file" else "" @@ -90,3 +119,12 @@ class TemplateField(BaseModel): if self.title_case: value = value.title() return value + + @field_validator("file_types") + def validate_file_types(cls, value): + if not isinstance(value, list): + raise ValueError("file_types must be a list") + return [ + (f".{file_type}" if isinstance(file_type, str) and not file_type.startswith(".") else file_type) + for file_type in value + ] diff --git a/src/backend/base/langflow/template/field/prompt.py b/src/backend/base/langflow/template/field/prompt.py new file mode 100644 index 000000000..72c57c5db --- /dev/null +++ b/src/backend/base/langflow/template/field/prompt.py @@ -0,0 +1,14 @@ +from typing import Optional + +from langflow.template.field.base import TemplateField + + +class DefaultPromptField(TemplateField): + name: str + display_name: Optional[str] = None + field_type: str = "str" + + advanced: bool = False + multiline: bool = True + input_types: list[str] = ["Document", "BaseOutputParser", "Record", "Text"] + value: str = "" # Set the value to empty string diff --git a/src/backend/langflow/template/frontend_node/__init__.py 
b/src/backend/base/langflow/template/frontend_node/__init__.py similarity index 95% rename from src/backend/langflow/template/frontend_node/__init__.py rename to src/backend/base/langflow/template/frontend_node/__init__.py index e13aa1ded..ceb2e0cb9 100644 --- a/src/backend/langflow/template/frontend_node/__init__.py +++ b/src/backend/base/langflow/template/frontend_node/__init__.py @@ -1,19 +1,21 @@ from langflow.template.frontend_node import ( agents, chains, + custom_components, + documentloaders, embeddings, llms, memories, prompts, + textsplitters, tools, vectorstores, - documentloaders, - textsplitters, - custom_components, + base, ) __all__ = [ "agents", + "base", "chains", "embeddings", "memories", diff --git a/src/backend/langflow/template/frontend_node/agents.py b/src/backend/base/langflow/template/frontend_node/agents.py similarity index 99% rename from src/backend/langflow/template/frontend_node/agents.py rename to src/backend/base/langflow/template/frontend_node/agents.py index 361abf531..0993c1736 100644 --- a/src/backend/langflow/template/frontend_node/agents.py +++ b/src/backend/base/langflow/template/frontend_node/agents.py @@ -1,6 +1,8 @@ from typing import Optional from langchain.agents import types + + from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/base/langflow/template/frontend_node/base.py similarity index 88% rename from src/backend/langflow/template/frontend_node/base.py rename to src/backend/base/langflow/template/frontend_node/base.py index daef4f9a8..8a051058e 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/base/langflow/template/frontend_node/base.py @@ -2,12 +2,13 @@ import re from collections import defaultdict from typing import ClassVar, Dict, List, Optional, Union +from pydantic import BaseModel, 
Field, field_serializer, model_serializer + from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.constants import CLASSES_TO_REMOVE, FORCE_SHOW_FIELDS +from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS from langflow.template.frontend_node.formatter import field_formatters from langflow.template.template.base import Template from langflow.utils import constants -from pydantic import BaseModel, Field, field_serializer, model_serializer class FieldFormatters(BaseModel): @@ -41,16 +42,40 @@ class FieldFormatters(BaseModel): class FrontendNode(BaseModel): _format_template: bool = True template: Template + """Template for the frontend node.""" description: Optional[str] = None + """Description of the frontend node.""" icon: Optional[str] = None + """Icon of the frontend node.""" + is_input: Optional[bool] = None + """Whether the frontend node is used as an input when processing the Graph. + If True, there should be a field named 'input_value'.""" + is_output: Optional[bool] = None + """Whether the frontend node is used as an output when processing the Graph. 
+ If True, there should be a field named 'input_value'.""" + is_composition: Optional[bool] = None + """Whether the frontend node is used for composition.""" base_classes: List[str] + """List of base classes for the frontend node.""" name: str = "" + """Name of the frontend node.""" display_name: Optional[str] = "" + """Display name of the frontend node.""" documentation: str = "" + """Documentation of the frontend node.""" custom_fields: Optional[Dict] = defaultdict(list) + """Custom fields of the frontend node.""" output_types: List[str] = [] + """List of output types for the frontend node.""" full_path: Optional[str] = None + """Full path of the frontend node.""" field_formatters: FieldFormatters = Field(default_factory=FieldFormatters) + """Field formatters for the frontend node.""" + frozen: bool = False + """Whether the frontend node is frozen.""" + + field_order: list[str] = [] + """Order of the fields in the frontend node.""" beta: bool = False error: Optional[str] = None @@ -69,7 +94,8 @@ class FrontendNode(BaseModel): def process_base_classes(self, base_classes: List[str]) -> List[str]: """Removes unwanted base classes from the list of base classes.""" - return [base_class for base_class in base_classes if base_class not in CLASSES_TO_REMOVE] + sorted_base_classes = sorted(list(set(base_classes)), key=lambda x: x.lower()) + return sorted_base_classes @field_serializer("display_name") def process_display_name(self, display_name: str) -> str: diff --git a/src/backend/langflow/template/frontend_node/chains.py b/src/backend/base/langflow/template/frontend_node/chains.py similarity index 90% rename from src/backend/langflow/template/frontend_node/chains.py rename to src/backend/base/langflow/template/frontend_node/chains.py index 5ff211f68..4ce23a316 100644 --- a/src/backend/langflow/template/frontend_node/chains.py +++ b/src/backend/base/langflow/template/frontend_node/chains.py @@ -7,6 +7,11 @@ from langflow.template.template.base import Template class 
ChainFrontendNode(FrontendNode): + output_type: str = "Chain" + + def add_extra_base_classes(self) -> None: + self.base_classes.append("Text") + def add_extra_fields(self) -> None: if self.template.type_name == "ConversationalRetrievalChain": # add memory @@ -48,16 +53,16 @@ class ChainFrontendNode(FrontendNode): @staticmethod def format_field(field: TemplateField, name: Optional[str] = None) -> None: FrontendNode.format_field(field, name) - key = field.name or "" - if "name" == "RetrievalQA" and key == "memory": + + if "name" == "RetrievalQA" and field.name == "memory": field.show = False field.required = False field.advanced = False - if "key" in key: + if "key" in str(field.name): field.password = False field.show = False - if key in ["input_key", "output_key"]: + if field.name in ["input_key", "output_key"]: field.required = True field.show = True field.advanced = True @@ -71,26 +76,26 @@ class ChainFrontendNode(FrontendNode): # field.value = field.value.template # Separated for possible future changes - if key == "prompt" and field.value is None: + if field.name == "prompt" and field.value is None: field.required = True field.show = True field.advanced = False - if key == "memory": + if field.name == "memory": # field.required = False field.show = True field.advanced = False - if key == "verbose": + if field.name == "verbose": field.required = False field.show = False field.advanced = True - if key == "llm": + if field.name == "llm": field.required = True field.show = True field.advanced = False - field.field_type = "BaseLanguageModel" # temporary fix + field.field_type = "BaseLanguageModel" field.is_list = False - if key == "return_source_documents": + if field.name == "return_source_documents": field.required = False field.show = True field.advanced = True @@ -98,6 +103,7 @@ class ChainFrontendNode(FrontendNode): class SeriesCharacterChainNode(FrontendNode): + output_type: str = "Chain" name: str = "SeriesCharacterChain" template: Template = Template( 
type_name="SeriesCharacterChain", @@ -144,11 +150,12 @@ class SeriesCharacterChainNode(FrontendNode): "Chain", "ConversationChain", "SeriesCharacterChain", - "Callable", + "function", ] class TimeTravelGuideChainNode(FrontendNode): + output_type: str = "Chain" name: str = "TimeTravelGuideChain" template: Template = Template( type_name="TimeTravelGuideChain", @@ -184,6 +191,7 @@ class TimeTravelGuideChainNode(FrontendNode): class MidJourneyPromptChainNode(FrontendNode): + output_type: str = "Chain" name: str = "MidJourneyPromptChain" template: Template = Template( type_name="MidJourneyPromptChain", @@ -219,6 +227,7 @@ class MidJourneyPromptChainNode(FrontendNode): class CombineDocsChainNode(FrontendNode): + output_type: str = "Chain" name: str = "CombineDocsChain" template: Template = Template( type_name="load_qa_chain", @@ -245,7 +254,10 @@ class CombineDocsChainNode(FrontendNode): ], ) description: str = """Load question answering chain.""" - base_classes: list[str] = ["BaseCombineDocumentsChain", "Callable"] + base_classes: list[str] = ["BaseCombineDocumentsChain", "function"] + + def to_dict(self): + return super().to_dict() @staticmethod def format_field(field: TemplateField, name: Optional[str] = None) -> None: diff --git a/src/backend/langflow/template/frontend_node/constants.py b/src/backend/base/langflow/template/frontend_node/constants.py similarity index 92% rename from src/backend/langflow/template/frontend_node/constants.py rename to src/backend/base/langflow/template/frontend_node/constants.py index a213a2744..513ccd1ef 100644 --- a/src/backend/langflow/template/frontend_node/constants.py +++ b/src/backend/base/langflow/template/frontend_node/constants.py @@ -63,13 +63,3 @@ You can change this to use other APIs like JinaChat, LocalAI and Prem. INPUT_KEY_INFO = """The variable to be used as Chat Input when more than one variable is available.""" OUTPUT_KEY_INFO = """The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)""" - - -CLASSES_TO_REMOVE = [ - "RunnableSerializable", - "Serializable", - "BaseModel", - "object", - "Runnable", - "Generic", -] diff --git a/src/backend/langflow/template/frontend_node/custom_components.py b/src/backend/base/langflow/template/frontend_node/custom_components.py similarity index 92% rename from src/backend/langflow/template/frontend_node/custom_components.py rename to src/backend/base/langflow/template/frontend_node/custom_components.py index d604ae055..456aa7466 100644 --- a/src/backend/langflow/template/frontend_node/custom_components.py +++ b/src/backend/base/langflow/template/frontend_node/custom_components.py @@ -4,7 +4,8 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow import CustomComponent +DEFAULT_CUSTOM_COMPONENT_CODE = """from langflow.interface.custom.custom_component import CustomComponent + from typing import Optional, List, Dict, Union from langflow.field_typing import ( AgentExecutor, @@ -48,7 +49,7 @@ class CustomComponentFrontendNode(FrontendNode): _format_template: bool = False name: str = "CustomComponent" display_name: Optional[str] = "CustomComponent" - beta: bool = True + beta: bool = False template: Template = Template( type_name="CustomComponent", fields=[ diff --git a/src/backend/langflow/template/frontend_node/documentloaders.py b/src/backend/base/langflow/template/frontend_node/documentloaders.py similarity index 100% rename from src/backend/langflow/template/frontend_node/documentloaders.py rename to src/backend/base/langflow/template/frontend_node/documentloaders.py diff --git a/src/backend/langflow/template/frontend_node/embeddings.py b/src/backend/base/langflow/template/frontend_node/embeddings.py similarity index 100% rename from src/backend/langflow/template/frontend_node/embeddings.py rename to 
src/backend/base/langflow/template/frontend_node/embeddings.py diff --git a/src/backend/base/langflow/template/frontend_node/formatter/__init__.py b/src/backend/base/langflow/template/frontend_node/formatter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/template/frontend_node/formatter/base.py b/src/backend/base/langflow/template/frontend_node/formatter/base.py similarity index 99% rename from src/backend/langflow/template/frontend_node/formatter/base.py rename to src/backend/base/langflow/template/frontend_node/formatter/base.py index f582bc298..20ba64eae 100644 --- a/src/backend/langflow/template/frontend_node/formatter/base.py +++ b/src/backend/base/langflow/template/frontend_node/formatter/base.py @@ -1,9 +1,10 @@ from abc import ABC, abstractmethod from typing import Optional -from langflow.template.field.base import TemplateField from pydantic import BaseModel +from langflow.template.field.base import TemplateField + class FieldFormatter(BaseModel, ABC): @abstractmethod diff --git a/src/backend/langflow/template/frontend_node/formatter/field_formatters.py b/src/backend/base/langflow/template/frontend_node/formatter/field_formatters.py similarity index 100% rename from src/backend/langflow/template/frontend_node/formatter/field_formatters.py rename to src/backend/base/langflow/template/frontend_node/formatter/field_formatters.py diff --git a/src/backend/langflow/template/frontend_node/llms.py b/src/backend/base/langflow/template/frontend_node/llms.py similarity index 100% rename from src/backend/langflow/template/frontend_node/llms.py rename to src/backend/base/langflow/template/frontend_node/llms.py diff --git a/src/backend/langflow/template/frontend_node/memories.py b/src/backend/base/langflow/template/frontend_node/memories.py similarity index 97% rename from src/backend/langflow/template/frontend_node/memories.py rename to src/backend/base/langflow/template/frontend_node/memories.py index fc9df6291..1cdc8febb 
100644 --- a/src/backend/langflow/template/frontend_node/memories.py +++ b/src/backend/base/langflow/template/frontend_node/memories.py @@ -1,9 +1,6 @@ from typing import Optional -from langchain_community.chat_message_histories.mongodb import ( - DEFAULT_COLLECTION_NAME, - DEFAULT_DBNAME, -) +from langchain_community.chat_message_histories.mongodb import DEFAULT_COLLECTION_NAME, DEFAULT_DBNAME from langchain_community.chat_message_histories.postgres import DEFAULT_CONNECTION_STRING from langflow.template.field.base import TemplateField @@ -13,7 +10,8 @@ from langflow.template.template.base import Template class MemoryFrontendNode(FrontendNode): - #! Needs testing + frozen: bool = True + def add_extra_fields(self) -> None: # chat history should have another way to add common field? # prevent adding incorect field in ChatMessageHistory diff --git a/src/backend/langflow/template/frontend_node/output_parsers.py b/src/backend/base/langflow/template/frontend_node/output_parsers.py similarity index 99% rename from src/backend/langflow/template/frontend_node/output_parsers.py rename to src/backend/base/langflow/template/frontend_node/output_parsers.py index e9b4d3706..6b33ee680 100644 --- a/src/backend/langflow/template/frontend_node/output_parsers.py +++ b/src/backend/base/langflow/template/frontend_node/output_parsers.py @@ -1,4 +1,6 @@ from typing import Optional + + from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode diff --git a/src/backend/langflow/template/frontend_node/prompts.py b/src/backend/base/langflow/template/frontend_node/prompts.py similarity index 100% rename from src/backend/langflow/template/frontend_node/prompts.py rename to src/backend/base/langflow/template/frontend_node/prompts.py diff --git a/src/backend/langflow/template/frontend_node/retrievers.py b/src/backend/base/langflow/template/frontend_node/retrievers.py similarity index 100% rename from 
src/backend/langflow/template/frontend_node/retrievers.py rename to src/backend/base/langflow/template/frontend_node/retrievers.py diff --git a/src/backend/langflow/template/frontend_node/textsplitters.py b/src/backend/base/langflow/template/frontend_node/textsplitters.py similarity index 99% rename from src/backend/langflow/template/frontend_node/textsplitters.py rename to src/backend/base/langflow/template/frontend_node/textsplitters.py index e6d5bdc60..eb302e996 100644 --- a/src/backend/langflow/template/frontend_node/textsplitters.py +++ b/src/backend/base/langflow/template/frontend_node/textsplitters.py @@ -1,6 +1,7 @@ +from langchain.text_splitter import Language + from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode -from langchain.text_splitter import Language class TextSplittersFrontendNode(FrontendNode): diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/base/langflow/template/frontend_node/tools.py similarity index 100% rename from src/backend/langflow/template/frontend_node/tools.py rename to src/backend/base/langflow/template/frontend_node/tools.py diff --git a/src/backend/langflow/template/frontend_node/utilities.py b/src/backend/base/langflow/template/frontend_node/utilities.py similarity index 100% rename from src/backend/langflow/template/frontend_node/utilities.py rename to src/backend/base/langflow/template/frontend_node/utilities.py index a5adb219d..51849189c 100644 --- a/src/backend/langflow/template/frontend_node/utilities.py +++ b/src/backend/base/langflow/template/frontend_node/utilities.py @@ -1,7 +1,7 @@ import ast from typing import Optional -from langflow.services.database.models.base import orjson_dumps +from langflow.services.database.models.base import orjson_dumps from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode diff --git 
a/src/backend/langflow/template/frontend_node/vectorstores.py b/src/backend/base/langflow/template/frontend_node/vectorstores.py similarity index 100% rename from src/backend/langflow/template/frontend_node/vectorstores.py rename to src/backend/base/langflow/template/frontend_node/vectorstores.py diff --git a/src/backend/base/langflow/template/template/__init__.py b/src/backend/base/langflow/template/template/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/template/template/base.py b/src/backend/base/langflow/template/template/base.py similarity index 97% rename from src/backend/langflow/template/template/base.py rename to src/backend/base/langflow/template/template/base.py index 9bc375b0f..d7632e239 100644 --- a/src/backend/langflow/template/template/base.py +++ b/src/backend/base/langflow/template/template/base.py @@ -1,14 +1,14 @@ from typing import Callable, Union +from pydantic import BaseModel, model_serializer + from langflow.template.field.base import TemplateField from langflow.utils.constants import DIRECT_TYPES -from pydantic import BaseModel, model_serializer class Template(BaseModel): type_name: str fields: list[TemplateField] - field_order: list[str] = [] def process_fields( self, @@ -30,7 +30,6 @@ class Template(BaseModel): for field in self.fields: result[field.name] = field.model_dump(by_alias=True, exclude_none=True) result["_type"] = result.pop("type_name") - result.pop("field_order", None) return result # For backwards compatibility diff --git a/src/backend/base/langflow/utils/__init__.py b/src/backend/base/langflow/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/utils/constants.py b/src/backend/base/langflow/utils/constants.py similarity index 98% rename from src/backend/langflow/utils/constants.py rename to src/backend/base/langflow/utils/constants.py index 9b01f8c2d..ac0a03fdf 100644 --- a/src/backend/langflow/utils/constants.py +++ 
b/src/backend/base/langflow/utils/constants.py @@ -158,8 +158,8 @@ LOADERS_INFO: List[Dict[str, Any]] = [ "loader": "UnstructuredMarkdownLoader", "name": "Unstructured Markdown (.md)", "import": "langchain_community.document_loaders.UnstructuredMarkdownLoader", - "defaultFor": ["md"], - "allowdTypes": ["md"], + "defaultFor": ["md", "mdx"], + "allowdTypes": ["md", "mdx"], }, { "loader": "UnstructuredPowerPointLoader", diff --git a/src/backend/langflow/utils/lazy_load.py b/src/backend/base/langflow/utils/lazy_load.py similarity index 100% rename from src/backend/langflow/utils/lazy_load.py rename to src/backend/base/langflow/utils/lazy_load.py diff --git a/src/backend/langflow/utils/logger.py b/src/backend/base/langflow/utils/logger.py similarity index 93% rename from src/backend/langflow/utils/logger.py rename to src/backend/base/langflow/utils/logger.py index 060ad9731..d92bdb0e1 100644 --- a/src/backend/langflow/utils/logger.py +++ b/src/backend/base/langflow/utils/logger.py @@ -25,10 +25,10 @@ def patching(record): def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None): - if os.getenv("LANGFLOW_LOG_LEVEL") in VALID_LOG_LEVELS and log_level is None: + if os.getenv("LANGFLOW_LOG_LEVEL", "").upper() in VALID_LOG_LEVELS and log_level is None: log_level = os.getenv("LANGFLOW_LOG_LEVEL") if log_level is None: - log_level = "INFO" + log_level = "ERROR" # Human-readable log_format = ( "{time:YYYY-MM-DD HH:mm:ss} - " diff --git a/src/backend/langflow/utils/payload.py b/src/backend/base/langflow/utils/payload.py similarity index 100% rename from src/backend/langflow/utils/payload.py rename to src/backend/base/langflow/utils/payload.py diff --git a/src/backend/base/langflow/utils/schemas.py b/src/backend/base/langflow/utils/schemas.py new file mode 100644 index 000000000..3e6f17a5a --- /dev/null +++ b/src/backend/base/langflow/utils/schemas.py @@ -0,0 +1,55 @@ +import enum +from typing import Dict, List, Optional, Union + +from 
langchain_core.messages import BaseMessage +from pydantic import BaseModel, model_validator + + +class ChatOutputResponse(BaseModel): + """Chat output response schema.""" + + message: Union[str, List[Union[str, Dict]]] + sender: Optional[str] = "Machine" + sender_name: Optional[str] = "AI" + session_id: Optional[str] = None + stream_url: Optional[str] = None + component_id: Optional[str] = None + + @classmethod + def from_message( + cls, + message: BaseMessage, + sender: Optional[str] = "Machine", + sender_name: Optional[str] = "AI", + ): + """Build chat output response from message.""" + content = message.content + return cls(message=content, sender=sender, sender_name=sender_name) + + @model_validator(mode="after") + def validate_message(self): + """Validate message.""" + # The idea here is ensure the \n in message + # is compliant with markdown if sender is machine + # so, for example: + # \n\n -> \n\n + # \n -> \n\n + + if self.sender != "Machine": + return self + + # We need to make sure we don't duplicate \n + # in the message + message = self.message.replace("\n\n", "\n") + self.message = message.replace("\n", "\n\n") + return self + + +class ContainsEnumMeta(enum.EnumMeta): + def __contains__(cls, item): + try: + cls(item) + except ValueError: + return False + else: + return True diff --git a/src/backend/langflow/utils/util.py b/src/backend/base/langflow/utils/util.py similarity index 92% rename from src/backend/langflow/utils/util.py rename to src/backend/base/langflow/utils/util.py index 7e1206222..4ec2c526c 100644 --- a/src/backend/langflow/utils/util.py +++ b/src/backend/base/langflow/utils/util.py @@ -5,12 +5,17 @@ from functools import wraps from typing import Any, Dict, List, Optional, Union from docstring_parser import parse -from langchain_core.documents import Document +from langflow.schema.schema import Record from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS from langflow.utils import constants +def unescape_string(s: str): 
+ # Replace escaped new line characters with actual new line characters + return s.replace("\\n", "\n") + + def remove_ansi_escape_codes(text): return re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", text) @@ -86,7 +91,8 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict, add_function: b if name_ == "default_factory": try: variables[class_field_items]["default"] = get_default_factory( - module=_class.__base__.__module__, function=value_ + module=_class.__base__.__module__, + function=value_, ) except Exception: variables[class_field_items]["default"] = None @@ -145,8 +151,8 @@ def build_template_from_method( "_type": _type, **{ name: { - "default": param.default if param.default != param.empty else None, - "type": param.annotation if param.annotation != param.empty else None, + "default": (param.default if param.default != param.empty else None), + "type": (param.annotation if param.annotation != param.empty else None), "required": param.default == param.empty, } for name, param in params.items() @@ -243,7 +249,7 @@ def format_dict(dictionary: Dict[str, Any], class_name: Optional[str] = None) -> """ for key, value in dictionary.items(): - if key == "_type": + if key in ["_type"]: continue _type: Union[str, type] = get_type(value) @@ -439,10 +445,20 @@ def add_options_to_field(value: Dict[str, Any], class_name: Optional[str], key: value["value"] = options_map[class_name][0] -def build_loader_repr_from_documents(documents: List[Document]) -> str: - if documents: - avg_length = sum(len(doc.page_content) for doc in documents) / len(documents) - return f"""{len(documents)} documents - \nAvg. Document Length (characters): {int(avg_length)} - Documents: {documents[:3]}...""" - return "0 documents" +def build_loader_repr_from_records(records: List[Record]) -> str: + """ + Builds a string representation of the loader based on the given records. + + Args: + records (List[Record]): A list of records. + + Returns: + str: A string representation of the loader. 
+ + """ + if records: + avg_length = sum(len(doc.text) for doc in records) / len(records) + return f"""{len(records)} records + \nAvg. Record Length (characters): {int(avg_length)} + Records: {records[:3]}...""" + return "0 records" diff --git a/src/backend/langflow/utils/validate.py b/src/backend/base/langflow/utils/validate.py similarity index 97% rename from src/backend/langflow/utils/validate.py rename to src/backend/base/langflow/utils/validate.py index 6e6226568..0871dbd82 100644 --- a/src/backend/langflow/utils/validate.py +++ b/src/backend/base/langflow/utils/validate.py @@ -157,6 +157,9 @@ def create_class(code, class_name): if not hasattr(ast, "TypeIgnore"): ast.TypeIgnore = create_type_ignore_class() + # Replace from langflow import CustomComponent with from langflow.custom import CustomComponent + code = code.replace("from langflow import CustomComponent", "from langflow.custom import CustomComponent") + module = ast.parse(code) exec_globals = prepare_global_scope(code, module) @@ -200,8 +203,8 @@ def prepare_global_scope(code, module): imported_module = importlib.import_module(node.module) for alias in node.names: exec_globals[alias.name] = getattr(imported_module, alias.name) - except ModuleNotFoundError as e: - raise ModuleNotFoundError(f"Module {node.module} not found. Please install it and try again.") from e + except ModuleNotFoundError: + raise ModuleNotFoundError(f"Module {node.module} not found. 
Please install it and try again") return exec_globals diff --git a/src/backend/base/langflow/worker.py b/src/backend/base/langflow/worker.py new file mode 100644 index 000000000..a5d1cf956 --- /dev/null +++ b/src/backend/base/langflow/worker.py @@ -0,0 +1,37 @@ +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union + +from asgiref.sync import async_to_sync +from celery.exceptions import SoftTimeLimitExceeded # type: ignore + +from langflow.core.celery_app import celery_app + +if TYPE_CHECKING: + from langflow.graph.vertex.base import Vertex + + +@celery_app.task(acks_late=True) +def test_celery(word: str) -> str: + return f"test task return {word}" + + +@celery_app.task(bind=True, soft_time_limit=30, max_retries=3) +def build_vertex(self, vertex: "Vertex") -> "Vertex": + """ + Build a vertex + """ + try: + vertex.task_id = self.request.id + async_to_sync(vertex.build)() + return vertex + except SoftTimeLimitExceeded as e: + raise self.retry(exc=SoftTimeLimitExceeded("Task took too long"), countdown=2) from e + + +@celery_app.task(acks_late=True) +def process_graph_cached_task( + data_graph: Dict[str, Any], + inputs: Optional[Union[dict, List[dict]]] = None, + clear_cache=False, + session_id=None, +) -> Dict[str, Any]: + raise NotImplementedError("This task is not implemented yet") diff --git a/src/backend/base/poetry.lock b/src/backend/base/poetry.lock new file mode 100644 index 000000000..a0d213615 --- /dev/null +++ b/src/backend/base/poetry.lock @@ -0,0 +1,6271 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.9.3" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = 
"aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = 
"aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = 
"aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = 
"aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, +] + +[package.dependencies] 
+aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.13.1" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.8" +files = [ + {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, + {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["backports.zoneinfo"] + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anthropic" +version = "0.21.3" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anthropic-0.21.3-py3-none-any.whl", hash = 
"sha256:5869115453b543a46ded6515c9f29b8d610b6e94bbba3230ad80ac947d2b0862"}, + {file = "anthropic-0.21.3.tar.gz", hash = "sha256:02f1ab5694c497e2b2d42d30d51a4f2edcaca92d2ec86bb64fe78a9c7434a869"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tokenizers = ">=0.13.0" +typing-extensions = ">=4.7,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] + +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper 
code, and adapters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "astrapy" +version = "0.7.7" +description = "AstraPy is a Pythonic SDK for DataStax Astra" +optional = false +python-versions = ">=3.8.0,<4.0.0" +files = [ + {file = "astrapy-0.7.7-py3-none-any.whl", hash = "sha256:e5def4e3c5ceb06dfc996471250dc0c972b729c06336ea4aac006dadfc071a9a"}, + {file = "astrapy-0.7.7.tar.gz", hash = "sha256:4bf81096a0c26cce18a14a34bb5f699649fd7d90b4ec6050f3d7c0274722d769"}, +] + +[package.dependencies] +cassio = ">=0.1.4,<0.2.0" +deprecation = ">=2.1.0,<2.2.0" +httpx = {version = ">=0.25.2,<1", extras = ["http2"]} +toml = ">=0.10.2,<0.11.0" + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + 
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bcrypt" +version = "4.0.1" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = 
"bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = 
"bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "bidict" +version = "0.23.1" +description = "The bidirectional mapping library for Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5"}, + {file = "bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71"}, +] + +[[package]] +name = "blinker" +version = "1.7.0" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, +] + +[[package]] +name = "brotli" +version = "1.1.0" +description = "Python bindings for the Brotli compression library" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, + {file = 
"Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, + {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, + {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, + {file = 
"Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, + {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, + {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, + {file = 
"Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, + {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, + {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, + {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, + {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = 
"sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, + {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, + {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, + {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, + {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, + {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, + {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, 
+ {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, + {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, + {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, + {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, +] + +[[package]] +name = "build" +version = "1.2.1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +files = [ + {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"}, + {file = "build-1.2.1.tar.gz", hash = 
"sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cassandra-driver" +version = "3.29.1" +description = "DataStax Driver for Apache Cassandra" +optional = false +python-versions = "*" +files = [ + {file = "cassandra-driver-3.29.1.tar.gz", hash = "sha256:38e9c2a2f2a9664bb03f1f852d5fccaeff2163942b5db35dffcf8bf32a51cfe5"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8f175c7616a63ca48cb8bd4acc443e2a3d889964d5157cead761f23cc8db7bd"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:7d66398952b9cd21c40edff56e22b6d3bce765edc94b207ddb5896e7bc9aa088"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbc6f575ef109ce5d4abfa2033bf36c394032abd83e32ab671159ce68e7e17b"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78f241af75696adb3e470209e2fbb498804c99e2b197d24d74774eee6784f283"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-win32.whl", hash = "sha256:54d9e651a742d6ca3d874ef8d06a40fa032d2dba97142da2d36f60c5675e39f8"}, + {file = "cassandra_driver-3.29.1-cp310-cp310-win_amd64.whl", hash = "sha256:630dc5423cd40eba0ee9db31065e2238098ff1a25a6b1bd36360f85738f26e4b"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b841d38c96bb878d31df393954863652d6d3a85f47bcc00fd1d70a5ea73023f"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19cc7375f673e215bd4cbbefae2de9f07830be7dabef55284a2d2ff8d8691efe"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b74b355be3dcafe652fffda8f14f385ccc1a8dae9df28e6080cc660da39b45f"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e6dac7eddd3f4581859f180383574068a3f113907811b4dad755a8ace4c3fbd"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-win32.whl", hash = "sha256:293a79dba417112b56320ed0013d71fd7520f5fc4a5fd2ac8000c762c6dd5b07"}, + {file = "cassandra_driver-3.29.1-cp311-cp311-win_amd64.whl", hash = "sha256:7c2374fdf1099047a6c9c8329c79d71ad11e61d9cca7de92a0f49655da4bdd8a"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4431a0c836f33a33c733c84997fbdb6398be005c4d18a8c8525c469fdc29393c"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23b08381b171a9e42ace483a82457edcddada9e8367e31677b97538cde2dc34"}, + {file = 
"cassandra_driver-3.29.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4beb29a0139e63a10a5b9a3c7b72c30a4e6e20c9f0574f9d22c0d4144fe3d348"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b206423cc454a78f16b411e7cb641dddc26168ac2e18f2c13665f5f3c89868c"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-win32.whl", hash = "sha256:ac898cca7303a3a2a3070513eee12ef0f1be1a0796935c5b8aa13dae8c0a7f7e"}, + {file = "cassandra_driver-3.29.1-cp312-cp312-win_amd64.whl", hash = "sha256:4ad0c9fb2229048ad6ff8c6ddbf1fdc78b111f2b061c66237c2257fcc4a31b14"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4282c5deac462e4bb0f6fd0553a33d514dbd5ee99d0812594210080330ddd1a2"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:41ca7eea069754002418d3bdfbd3dfd150ea12cb9db474ab1a01fa4679a05bcb"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6639ccb268c4dc754bc45e03551711780d0e02cb298ab26cde1f42b7bcc74f8"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a9d7d3b1be24a7f113b5404186ccccc977520401303a8fe78ba34134cad2482"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-win32.whl", hash = "sha256:81c8fd556c6e1bb93577e69c1f10a3fadf7ddb93958d226ccbb72389396e9a92"}, + {file = "cassandra_driver-3.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:cfe70ed0f27af949de2767ea9cef4092584e8748759374a55bf23c30746c7b23"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2c03c1d834ac1a0ae39f9af297a8cd38829003ce910b08b324fb3abe488ce2b"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9a3e1e2b01f3b7a5cf75c97401bce830071d99c42464352087d7475e0161af93"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:90c42006665a4e490b0766b70f3d637f36a30accbef2da35d6d4081c0e0bafc3"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c1aca41f45772f9759e8246030907d92bc35fbbdc91525a3cb9b49939b80ad7"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-win32.whl", hash = "sha256:ce4a66245d4a0c8b07fdcb6398698c2c42eb71245fb49cff39435bb702ff7be6"}, + {file = "cassandra_driver-3.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cae69ceb1b1d9383e988a1b790115253eacf7867ceb15ed2adb736e3ce981be"}, +] + +[package.dependencies] +geomet = ">=0.1,<0.3" + +[package.extras] +cle = ["cryptography (>=35.0)"] +graph = ["gremlinpython (==3.4.6)"] + +[[package]] +name = "cassio" +version = "0.1.5" +description = "A framework-agnostic Python library to seamlessly integrate Apache Cassandra(R) with ML/LLM/genAI workloads." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cassio-0.1.5-py3-none-any.whl", hash = "sha256:cf1d11f255c040bc0aede4963ca020840133377aa54f7f15d2f819d6553d52ce"}, + {file = "cassio-0.1.5.tar.gz", hash = "sha256:88c50c34d46a1bfffca1e0b600318a6efef45e6c18a56ddabe208cbede8dcc27"}, +] + +[package.dependencies] +cassandra-driver = ">=3.28.0" +numpy = ">=1.0" +requests = ">=2" + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = 
"sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "chroma-hnswlib" +version = "0.7.3" +description = "Chromas fork of hnswlib" +optional = false +python-versions = "*" +files = [ + {file = "chroma-hnswlib-0.7.3.tar.gz", hash = "sha256:b6137bedde49fffda6af93b0297fe00429fc61e5a072b1ed9377f909ed95a932"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59d6a7c6f863c67aeb23e79a64001d537060b6995c3eca9a06e349ff7b0998ca"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:d71a3f4f232f537b6152947006bd32bc1629a8686df22fd97777b70f416c127a"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c92dc1ebe062188e53970ba13f6b07e0ae32e64c9770eb7f7ffa83f149d4210"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49da700a6656fed8753f68d44b8cc8ae46efc99fc8a22a6d970dc1697f49b403"}, + {file = "chroma_hnswlib-0.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:108bc4c293d819b56476d8f7865803cb03afd6ca128a2a04d678fffc139af029"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11e7ca93fb8192214ac2b9c0943641ac0daf8f9d4591bb7b73be808a83835667"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f552e4d23edc06cdeb553cdc757d2fe190cdeb10d43093d6a3319f8d4bf1c6b"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f96f4d5699e486eb1fb95849fe35ab79ab0901265805be7e60f4eaa83ce263ec"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368e57fe9ebae05ee5844840fa588028a023d1182b0cfdb1d13f607c9ea05756"}, + {file = "chroma_hnswlib-0.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:b7dca27b8896b494456db0fd705b689ac6b73af78e186eb6a42fea2de4f71c6f"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70f897dc6218afa1d99f43a9ad5eb82f392df31f57ff514ccf4eeadecd62f544"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aef10b4952708f5a1381c124a29aead0c356f8d7d6e0b520b778aaa62a356f4"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee2d8d1529fca3898d512079144ec3e28a81d9c17e15e0ea4665697a7923253"}, + {file = "chroma_hnswlib-0.7.3-cp37-cp37m-win_amd64.whl", hash = 
"sha256:a4021a70e898783cd6f26e00008b494c6249a7babe8774e90ce4766dd288c8ba"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8f61fa1d417fda848e3ba06c07671f14806a2585272b175ba47501b066fe6b1"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d7563be58bc98e8f0866907368e22ae218d6060601b79c42f59af4eccbbd2e0a"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51b8d411486ee70d7b66ec08cc8b9b6620116b650df9c19076d2d8b6ce2ae914"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d706782b628e4f43f1b8a81e9120ac486837fbd9bcb8ced70fe0d9b95c72d77"}, + {file = "chroma_hnswlib-0.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:54f053dedc0e3ba657f05fec6e73dd541bc5db5b09aa8bc146466ffb734bdc86"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e607c5a71c610a73167a517062d302c0827ccdd6e259af6e4869a5c1306ffb5d"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2358a795870156af6761890f9eb5ca8cade57eb10c5f046fe94dae1faa04b9e"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cea425df2e6b8a5e201fff0d922a1cc1d165b3cfe762b1408075723c8892218"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:454df3dd3e97aa784fba7cf888ad191e0087eef0fd8c70daf28b753b3b591170"}, + {file = "chroma_hnswlib-0.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:df587d15007ca701c6de0ee7d5585dd5e976b7edd2b30ac72bc376b3c3f85882"}, +] + +[package.dependencies] +numpy = "*" + +[[package]] +name = "chromadb" +version = "0.4.24" +description = "Chroma." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "chromadb-0.4.24-py3-none-any.whl", hash = "sha256:3a08e237a4ad28b5d176685bd22429a03717fe09d35022fb230d516108da01da"}, + {file = "chromadb-0.4.24.tar.gz", hash = "sha256:a5c80b4e4ad9b236ed2d4899a5b9e8002b489293f2881cb2cadab5b199ee1c72"}, +] + +[package.dependencies] +bcrypt = ">=4.0.1" +build = ">=1.0.3" +chroma-hnswlib = "0.7.3" +fastapi = ">=0.95.2" +grpcio = ">=1.58.0" +importlib-resources = "*" +kubernetes = ">=28.1.0" +mmh3 = ">=4.0.1" +numpy = ">=1.22.5" +onnxruntime = ">=1.14.1" +opentelemetry-api = ">=1.2.0" +opentelemetry-exporter-otlp-proto-grpc = ">=1.2.0" +opentelemetry-instrumentation-fastapi = ">=0.41b0" +opentelemetry-sdk = ">=1.2.0" +orjson = ">=3.9.12" +overrides = ">=7.3.1" +posthog = ">=2.4.0" +pulsar-client = ">=3.1.0" +pydantic = ">=1.9" +pypika = ">=0.48.9" +PyYAML = ">=6.0.0" +requests = ">=2.28" +tenacity = ">=8.2.3" +tokenizers = ">=0.13.2" +tqdm = ">=4.65.0" +typer = ">=0.9.0" +typing-extensions = ">=4.5.0" +uvicorn = {version = ">=0.18.3", extras = ["standard"]} + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coloredlogs" +version = "15.0.1" +description = "Colored terminal output for Python's logging module" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "coloredlogs-15.0.1-py2.py3-none-any.whl", hash = "sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934"}, + {file = "coloredlogs-15.0.1.tar.gz", hash = "sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0"}, +] + +[package.dependencies] +humanfriendly = ">=9.1" + +[package.extras] +cron = ["capturer (>=2.4)"] + +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "configargparse" +version = "1.7" +description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "ConfigArgParse-1.7-py3-none-any.whl", hash = "sha256:d249da6591465c6c26df64a9f73d2536e743be2f244eb3ebe61114af2f94f86b"}, + {file = "ConfigArgParse-1.7.tar.gz", hash = "sha256:e7067471884de5478c58a511e529f0f9bd1c66bfef1dea90935438d6c23306d1"}, +] + +[package.extras] +test = ["PyYAML", "mock", "pytest"] +yaml = ["PyYAML"] + +[[package]] +name = "coverage" +version = "7.4.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = 
"sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = 
"coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = 
"coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = 
"coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.5" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16"}, + {file = "cryptography-42.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278"}, + {file = "cryptography-42.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d"}, + {file = "cryptography-42.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da"}, + {file = "cryptography-42.0.5-cp37-abi3-win32.whl", hash = "sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74"}, + {file = "cryptography-42.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940"}, + {file = "cryptography-42.0.5-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc"}, + {file = "cryptography-42.0.5-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc"}, + {file = "cryptography-42.0.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30"}, + {file = "cryptography-42.0.5-cp39-abi3-win32.whl", hash = "sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413"}, + {file = "cryptography-42.0.5-cp39-abi3-win_amd64.whl", hash = "sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2"}, + {file = 
"cryptography-42.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c"}, + {file = "cryptography-42.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac"}, + {file = "cryptography-42.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd"}, + {file = "cryptography-42.0.5.tar.gz", hash = "sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dataclasses-json" +version = "0.6.4" +description = "Easily serialize dataclasses to and from JSON." 
+optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.4-py3-none-any.whl", hash = "sha256:f90578b8a3177f7552f4e1a6e535e84293cd5da421fcce0642d49c0d7bdf8df2"}, + {file = "dataclasses_json-0.6.4.tar.gz", hash = "sha256:73696ebf24936560cca79a2430cbc4f3dd23ac7bf46ed17f38e5e5e7657a6377"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "debugpy" +version = "1.8.1" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, + {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, + {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, + {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, + {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, + {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, + {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, + {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, + {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, + {file = 
"debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, + {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, + {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, + {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, + {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, + {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, + {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, + {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, + {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, + {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, + {file = "debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, + {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, + {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" 
+files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "dill" +version = "0.3.8" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files 
= [ + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "docstring-parser" +version = "0.15" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.15-py3-none-any.whl", hash = "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"}, + {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"}, +] + +[[package]] +name = "duckdb" +version = "0.9.2" +description = "DuckDB embedded database" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, + {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, + {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, + {file = 
"duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, + {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, + {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, + {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, + {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, + {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, + {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, + {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, + {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, + {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, + {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, + {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, + {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, + {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, + {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, + {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, + {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, + {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, + {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, + {file = 
"duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, + {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, + {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, + {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, + {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, + {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, + {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, +] + +[[package]] +name = "ecdsa" +version = "0.18.0" +description = "ECDSA cryptographic signature library (pure python)" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, + {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, +] + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "emoji" +version = "2.11.0" +description = "Emoji for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "emoji-2.11.0-py2.py3-none-any.whl", hash = "sha256:63fc9107f06c6c2e48e5078ce9575cef98518f5ac09474f6148a43e989989582"}, + {file = 
"emoji-2.11.0.tar.gz", hash = "sha256:772eaa30f4e0b1ce95148a092df4c7dc97644532c03225326b0fd05e8a9f72a3"}, +] + +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "2.0.2" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.7" +files = [ + {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, + {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "fastapi" +version = "0.109.2" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fastapi-0.109.2-py3-none-any.whl", hash = 
"sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"}, + {file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.36.3,<0.37.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "filelock" +version = "3.13.3" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.3-py3-none-any.whl", hash = "sha256:5ffa845303983e7a0b7ae17636509bc97997d58afeafa72fb141a17b152284cb"}, + {file = "filelock-3.13.3.tar.gz", hash = "sha256:a79895a25bbefdf55d1a2a0a80968f7dbb28edcd6d4234a0afb3f37ecde4b546"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flask" +version = "3.0.2" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.2-py3-none-any.whl", hash = "sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e"}, + {file = "flask-3.0.2.tar.gz", hash = "sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-cors" +version = "4.0.0" +description = "A Flask extension adding a decorator for CORS support" +optional = false +python-versions = "*" +files = [ + {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, + {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, +] + +[package.dependencies] +Flask = ">=0.9" + +[[package]] +name = "flask-login" +version = "0.6.3" +description = "User authentication and session management for Flask." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Flask-Login-0.6.3.tar.gz", hash = "sha256:5e23d14a607ef12806c699590b89d0f0e0d67baeec599d75947bf9c147330333"}, + {file = "Flask_Login-0.6.3-py3-none-any.whl", hash = "sha256:849b25b82a436bf830a054e74214074af59097171562ab10bfa999e6b78aae5d"}, +] + +[package.dependencies] +Flask = ">=1.0.4" +Werkzeug = ">=1.0.1" + +[[package]] +name = "flatbuffers" +version = "24.3.25" +description = "The FlatBuffers serialization format for Python" +optional = false +python-versions = "*" +files = [ + {file = "flatbuffers-24.3.25-py2.py3-none-any.whl", hash = "sha256:8dbdec58f935f3765e4f7f3cf635ac3a77f83568138d6a2311f524ec96364812"}, + {file = "flatbuffers-24.3.25.tar.gz", hash = "sha256:de2ec5b203f21441716617f38443e0a8ebf3d25bf0d9c0bb0ce68fa00ad546a4"}, +] + +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = 
"frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = 
"frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + +[[package]] +name = "fsspec" +version = "2024.3.1" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", 
"pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "geomet" +version = "0.2.1.post1" +description = "GeoJSON <-> WKT/WKB conversion utilities" +optional = false +python-versions = ">2.6, !=3.3.*, <4" +files = [ + {file = "geomet-0.2.1.post1-py3-none-any.whl", hash = "sha256:a41a1e336b381416d6cbed7f1745c848e91defaa4d4c1bdc1312732e46ffad2b"}, + {file = "geomet-0.2.1.post1.tar.gz", hash = "sha256:91d754f7c298cbfcabd3befdb69c641c27fe75e808b27aa55028605761d17e95"}, +] + +[package.dependencies] +click = "*" +six = "*" + +[[package]] +name = "gevent" +version = "24.2.1" +description = "Coroutine-based network library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "gevent-24.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07"}, + {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3"}, + {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1df555431f5cd5cc189a6ee3544d24f8c52f2529134685f1e878c4972ab026"}, + {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14532a67f7cb29fb055a0e9b39f16b88ed22c66b96641df8c04bdc38c26b9ea5"}, + {file = 
"gevent-24.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd23df885318391856415e20acfd51a985cba6919f0be78ed89f5db9ff3a31cb"}, + {file = "gevent-24.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ca80b121bbec76d7794fcb45e65a7eca660a76cc1a104ed439cdbd7df5f0b060"}, + {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9913c45d1be52d7a5db0c63977eebb51f68a2d5e6fd922d1d9b5e5fd758cc98"}, + {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:918cdf8751b24986f915d743225ad6b702f83e1106e08a63b736e3a4c6ead789"}, + {file = "gevent-24.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d5325ccfadfd3dcf72ff88a92fb8fc0b56cacc7225f0f4b6dcf186c1a6eeabc"}, + {file = "gevent-24.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:03aa5879acd6b7076f6a2a307410fb1e0d288b84b03cdfd8c74db8b4bc882fc5"}, + {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8bb35ce57a63c9a6896c71a285818a3922d8ca05d150fd1fe49a7f57287b836"}, + {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7f87c2c02e03d99b95cfa6f7a776409083a9e4d468912e18c7680437b29222c"}, + {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968581d1717bbcf170758580f5f97a2925854943c45a19be4d47299507db2eb7"}, + {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7899a38d0ae7e817e99adb217f586d0a4620e315e4de577444ebeeed2c5729be"}, + {file = "gevent-24.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f5e8e8d60e18d5f7fd49983f0c4696deeddaf6e608fbab33397671e2fcc6cc91"}, + {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fbfdce91239fe306772faab57597186710d5699213f4df099d1612da7320d682"}, + {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cdf66977a976d6a3cfb006afdf825d1482f84f7b81179db33941f2fc9673bb1d"}, + {file = "gevent-24.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1dffb395e500613e0452b9503153f8f7ba587c67dd4a85fc7cd7aa7430cb02cc"}, + {file = "gevent-24.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6c47ae7d1174617b3509f5d884935e788f325eb8f1a7efc95d295c68d83cce40"}, + {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7cac622e11b4253ac4536a654fe221249065d9a69feb6cdcd4d9af3503602e0"}, + {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf5b9c72b884c6f0c4ed26ef204ee1f768b9437330422492c319470954bc4cc7"}, + {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5de3c676e57177b38857f6e3cdfbe8f38d1cd754b63200c0615eaa31f514b4f"}, + {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4faf846ed132fd7ebfbbf4fde588a62d21faa0faa06e6f468b7faa6f436b661"}, + {file = "gevent-24.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:368a277bd9278ddb0fde308e6a43f544222d76ed0c4166e0d9f6b036586819d9"}, + {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f8a04cf0c5b7139bc6368b461257d4a757ea2fe89b3773e494d235b7dd51119f"}, + {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d8d0642c63d453179058abc4143e30718b19a85cbf58c2744c9a63f06a1d388"}, + {file = "gevent-24.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:94138682e68ec197db42ad7442d3cf9b328069c3ad8e4e5022e6b5cd3e7ffae5"}, + {file = "gevent-24.2.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:8f4b8e777d39013595a7740b4463e61b1cfe5f462f1b609b28fbc1e4c4ff01e5"}, + {file = "gevent-24.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141a2b24ad14f7b9576965c0c84927fc85f824a9bb19f6ec1e61e845d87c9cd8"}, + {file = "gevent-24.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash 
= "sha256:9202f22ef811053077d01f43cc02b4aaf4472792f9fd0f5081b0b05c926cca19"}, + {file = "gevent-24.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2955eea9c44c842c626feebf4459c42ce168685aa99594e049d03bedf53c2800"}, + {file = "gevent-24.2.1-cp38-cp38-win32.whl", hash = "sha256:44098038d5e2749b0784aabb27f1fcbb3f43edebedf64d0af0d26955611be8d6"}, + {file = "gevent-24.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:117e5837bc74a1673605fb53f8bfe22feb6e5afa411f524c835b2ddf768db0de"}, + {file = "gevent-24.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:2ae3a25ecce0a5b0cd0808ab716bfca180230112bb4bc89b46ae0061d62d4afe"}, + {file = "gevent-24.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ceb59986456ce851160867ce4929edaffbd2f069ae25717150199f8e1548b8"}, + {file = "gevent-24.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2e9ac06f225b696cdedbb22f9e805e2dd87bf82e8fa5e17756f94e88a9d37cf7"}, + {file = "gevent-24.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:90cbac1ec05b305a1b90ede61ef73126afdeb5a804ae04480d6da12c56378df1"}, + {file = "gevent-24.2.1-cp39-cp39-win32.whl", hash = "sha256:782a771424fe74bc7e75c228a1da671578c2ba4ddb2ca09b8f959abdf787331e"}, + {file = "gevent-24.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:3adfb96637f44010be8abd1b5e73b5070f851b817a0b182e601202f20fa06533"}, + {file = "gevent-24.2.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:7b00f8c9065de3ad226f7979154a7b27f3b9151c8055c162332369262fc025d8"}, + {file = "gevent-24.2.1.tar.gz", hash = "sha256:432fc76f680acf7cf188c2ee0f5d3ab73b63c1f03114c7cd8a34cebbe5aa2056"}, +] + +[package.dependencies] +cffi = {version = ">=1.12.2", markers = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\""} +greenlet = [ + {version = ">=2.0.0", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}, + {version = ">=3.0rc3", markers = "platform_python_implementation == \"CPython\" and 
python_version >= \"3.11\""}, +] +"zope.event" = "*" +"zope.interface" = "*" + +[package.extras] +dnspython = ["dnspython (>=1.16.0,<2.0)", "idna"] +docs = ["furo", "repoze.sphinx.autointerface", "sphinx", "sphinxcontrib-programoutput", "zope.schema"] +monitor = ["psutil (>=5.7.0)"] +recommended = ["cffi (>=1.12.2)", "dnspython (>=1.16.0,<2.0)", "idna", "psutil (>=5.7.0)"] +test = ["cffi (>=1.12.2)", "coverage (>=5.0)", "dnspython (>=1.16.0,<2.0)", "idna", "objgraph", "psutil (>=5.7.0)", "requests"] + +[[package]] +name = "geventhttpclient" +version = "2.0.12" +description = "http client library for gevent" +optional = false +python-versions = "*" +files = [ + {file = "geventhttpclient-2.0.12-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6d0fafc15bbd93b1f271b4c14b327d15c6930c8d78d8ee0d8a55c9cd3e34c18f"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3f429ece7b6612ef333e9bbeb205513cec33a178f545b3612530a9c5c36a0310"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20ffc5a2b9cb5557d529d9296ffdaa5057a23e6bb439a905160a787079ec78a2"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80a96670c5ab668f52dcaf705640b442faeafb2bfd2e54d5f08ac29ac80aab12"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4766aff690198119c998474d9c20c1b3ffaff337d0d62a6d8b19cc871c3a276d"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6d15f459737178e2b9a1d37b32161955a7d72062a3fc473d88c9e9f146cff22"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a75007314fc15947fd94f154e139a6f78a4d40ed70d52dbb1724e7ea2d732ca7"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-musllinux_1_1_aarch64.whl", 
hash = "sha256:16e440152ea4f943dfc476462c1c3f29d47d583e679b58bccac9bfaa33eedcfd"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e1d9c9b533b6c0b5a7eac23f68b25c8d3db1d38b8e504756c53366d2622a24a5"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:758dd4a3172f740255f755fd393f0888e879a7102a537bba98a35a417be30d3e"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:933426c92e85d8f6717c4d61f2c6c99fbb7d84c91373400eaf381052a35ea414"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-win32.whl", hash = "sha256:e70247c900c4e4413af155e49f342055af9eb20c141735cce36d8a9dc10dc314"}, + {file = "geventhttpclient-2.0.12-cp310-cp310-win_amd64.whl", hash = "sha256:8dac40240fe446b94dd8645e2691d077b98b1e109ed945d2c91923c300c6f66d"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3e3b3b2857ed48dd8af15c8e09539c8e0bf3721f515c0a8f3cfcbe0090196cc4"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:114cfa7f4db7dcb5603ade4744bc6f5d6d168c94b05aca052e2fc84c906d2009"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:904aaab04a8c4ebf52217930242693509cfbbd90f2b2afc454e14da82710367f"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56935ebec38a7c9ccc3dcadaebf2624601567504cd3a44794dc9262aca147040"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bff88eededd1f915cd86de5e8a891e1988b6d42093cc07be5fe3133f8baf170c"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:212014f4133938ba6453dbfa6d3a643c915dd4873d1de1d6172e4c6c178e4a6c"}, + {file = 
"geventhttpclient-2.0.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d471e79a55d37ad021a4832b0895bccb638f70664040a29230e156a0b92b23d"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:599c4d37d928323b5f0353434f73de9e88f305f59a5472ffc7f5c131a2485512"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fddf2b3c4d5d99b826561173be04adbc92cab52081ba142c2158e0ba3b08b762"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5075c7f15e0a39b7ceae6afcb0b3a66c0ab9364a9eb589b7f51b567835fae5d7"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:58a6f9d93ef2b1a09479564c951bc7b058350bd757628a32945f274cd314fb98"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-win32.whl", hash = "sha256:a0bb5a35b199356b0c9b5ec3c3152ebfe4ecbd79e00d486d461920a9d96d1fd2"}, + {file = "geventhttpclient-2.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:972a92f4a453a3ef4737e79e7e64f3089a06f385e13493fa19381486e893bd98"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0bee74f32eed6278f3837e07983b5a6b186920c7eb3b35bc6e97360697632655"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fb85d8ed42cc389e5cdac06221f16cb6bca9dbbf5219c44d0731f742a6bffc09"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c302a16328406003cf4d7d71f59cafc2d42f13d5dc9ea4c8bd248390de985a86"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3266ef4da21a47d0181d4e3cb5209494e3ce6e4d4cc71414ea74b3a1f7e0e921"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb7a257e8f4f0c0335a259f2e9eae527fa042db9ea2e4580a381e9c01fc49f4"}, + {file = 
"geventhttpclient-2.0.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4741d66098b2b289f584effa7de3ae7bf1efb06e2d83abdbbc468a0a4dec6b3a"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef2b523043ab9c6057ed19993f629e3fa47f8f92a319f5682de05e604ed8cc9"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:235058a6e420b2aae196a4ba7e23f81ebc2dc3acf6baa9d85dc99963b3e0f0cf"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:c918d731e0fe676b4c06f53081526f4d3f4836b7a72be7b46c90603a280260fa"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:9e7696a61b384f8d2a075cca9633fdcc897c43eabbcf70fca492206241fa1a3b"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:105a1aa161223079dbd669b4334cd765964b5917ca4f3da8c5b59c4ef36a0adf"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-win32.whl", hash = "sha256:09e13c05633d1eeb2cba68835618f4ee25c5a7b466c47cfdb01174f61e51a23d"}, + {file = "geventhttpclient-2.0.12-cp312-cp312-win_amd64.whl", hash = "sha256:f853438a1850d45fb434e42ffbb06be81af558e5dd9243d530c2cdc5f804627f"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:62ad2ac321967ff92768e93ea18cb59f8920fbae5b42340b93e7cf11ee4f35d3"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de8b6618a354bded39224def8b6e8b939c468f0edeb2570fdacd9003fd14c57c"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:902ba66284d40dd97a693e952e4bb2f59528806ee40ecd586471fd5bca7fb295"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9ef6c9acff6ce379c8a851554954eee6481c35531d82888a46ccada0ea17a791"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e8abf4ccd59d58f7aa91f4c68760d82534bac5c5c9b7d2ccb4c0a5fc69585ff"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:bdeed563faa09fd51ee4606b92f69ecd42b67277ed590f2921816941ed031177"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5a7d9b7e2dbc962f143938cdef8a657af1ccf423b2443a194b86ba0e85735c23"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:a9a7ea4665418abe093e48576769181ae3c75a97cafe107c0463a169af755b2c"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:19488a212c858792fd4fa83be568d4cdbbd4c1267b03b10b6a8a654fd862d2f9"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-win32.whl", hash = "sha256:445b80548062ef6c1b30e5e1b4ec471937fda64b783da953462972f48c2038de"}, + {file = "geventhttpclient-2.0.12-cp36-cp36m-win_amd64.whl", hash = "sha256:bf283bdbb4b935bfef114e1af19535a602e717ae9a7e8408ab61775d06a463b4"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:370086ced942449f9b60590d215eec7f81fe54d7e3ee3add6b2f014ccac4f38d"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e405735db8261ca99d9b80fda3f46d457f04b98a7ce0e49bb35ca32c2a5bbb2d"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f4680b0ed5e588437797026f25829eb9681954ac64470dc8014436910b2fb09"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad053e7b4ac2f9fcdb02a5d9b99fd72acf28265ba8be7392a25235bb631d2511"}, + {file = 
"geventhttpclient-2.0.12-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64232158542f2adfde24f41c1e3ed731cca67e469e653ac7634815af168551b4"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9be5c3f68e4f41aceccae38508029a70b1fb3f9fc840b7c55971f5fbe099d7e4"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:06b4276075f4f3eeb30a3c1476f40d53030159450def58c1d8c3b724411d8ed9"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b311beb0657394b5df09af05ec5d84058f3531f3176ab1a0f7f4eae7b56bc315"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b6a9d00b58527328d9f7a0a20b715d4e780a990b0fb75b556085417c22d73dd0"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-win32.whl", hash = "sha256:987ef3bd0d7e3b01cafc8e135ab6e8f977b60eeda096ead2cb5504124896b1a2"}, + {file = "geventhttpclient-2.0.12-cp37-cp37m-win_amd64.whl", hash = "sha256:dca64867b2d79433eb8557db00e74e17a2f0d444a9a90fb6f49cadaeddf703a5"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:707467d6e8ad19749e7130b7c7bcb3a446c8e4f66454e1d47f4dfffa974683da"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2e436a2c41c71db17fd46df4925eb5e4d3856eb1b5fda0ce6b1137a6c6c87fa"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f88d2f3a20afa999548622b31dbc3db5aa355c3032f3ae96a4195c5f938fee92"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a6581b8de9fa4b44916dcfabdc608409cfcf02fac39a62d40f6bcf6af726ad"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c91e0ee50f8a1ea3a268f06c5bd44efe86b7f57961d7c923602038fcc010c3c"}, + {file = 
"geventhttpclient-2.0.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e3031817b8f2411086765de4bb1080c755b009ee8dc4a6111ad74f6ff4a363f"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ff9a95e2d2035c1f5ac726166a598ea4071412c304a74a8cd5d2d8dfbf40b5e"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:04f41d8f14e241e8d0c828ff59634674e98f96f39f6d12f43009a7332c4e2c82"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bea7376205629e8964f624b08d6836892e8d17ed8b8a57d5d2edbd7983440652"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd9baf30e2bdd3110394365998037a45b43f86804b8f3c77f194f64eddc7dc54"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:23c27b04ad25258959c088c0d87832befc7be2b09c5c35fdd76e417f5b546da0"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-win32.whl", hash = "sha256:792e154009f6f63e3fbbe4b3109780ada275c4ed29659430c06dc8e1b2ed03ef"}, + {file = "geventhttpclient-2.0.12-cp38-cp38-win_amd64.whl", hash = "sha256:7b41a0510297a8ebbeffbef082e0896ecf37d5302999a3b58d208193c3c3e362"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5006e34586bba4ebd5a7a5482f9e7743e1b3b9ff50c994105fb45e43044c38c9"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d812074192822f57603973d6bcced0f02c6cc371cf63e729793f193c874f30ce"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2a64bd8bce446be4fe869b64af310cd218d2329aa4e9d85b6a060da93c62296b"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7fc536f2972c75da85f9360d0a3e5433baf6d777442a013052f9a501311ddcd"}, + {file = 
"geventhttpclient-2.0.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a60dec2ac44f494af9e42889dd7f7d653545b4c4892da4acbe383c0ffc305a1"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa2ef1b92662ee9866bda52123f6f266ff4479437e7b5037a6427cf09e071e25"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b8215e9a018a3634bdef4891634ceb9b10f47292b0091a1d96c363d8d5d7fdd"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:90d5c0974518d35514a8896529d113e778e9d42d10699ac6051cd3e8f1ff81f6"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:83c28617b02b6ab53653324b2a9ff2d4a4b1f1582fbc4e70f47d2ef9fe6ab1f7"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d8c7dfa2bcd15988a350e90b32c6b5426924f2ffd0ce75f52ca2d5ef540b3fbc"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ded99bdbe7953f0734720d9908ed6f808fd12e97de05508822695a87b69f10f2"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-win32.whl", hash = "sha256:ebcd7311901e52929d2bd3af9442970fdd12b200285d9a55d52994e033e73050"}, + {file = "geventhttpclient-2.0.12-cp39-cp39-win_amd64.whl", hash = "sha256:204c3976b2a4fcefe8f157fe303da45b85fc31147bdfce7b53b1098f05f1cad2"}, + {file = "geventhttpclient-2.0.12-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c651d22fae3084502afc876e8c73d521496f512e16828939333f17cad64ea47f"}, + {file = "geventhttpclient-2.0.12-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c111addb5b27431805a8ad544dec292a7153cc44b68df28e782821431970d8"}, + {file = "geventhttpclient-2.0.12-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:14cb7f4854d77c63506e31677fb548d137b20cbe34a11b5442f065b1e46c2246"}, + {file = "geventhttpclient-2.0.12-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8ac257aa714999b523282c0da6faf4d333d44131cea3b15fe802e00d35dd5c2"}, + {file = "geventhttpclient-2.0.12-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d97a41f959cd331eb8a633ed8edf6cc002a2a41a21e94876db833729b803924f"}, + {file = "geventhttpclient-2.0.12-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6ecb9a600a2da862b079ef3ebdffc9acec089c914bebc0c54614049584bfbb94"}, + {file = "geventhttpclient-2.0.12-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:662bb04e99466c25a1bf8b47351f79b339b6627721bb357bf3bc0d263c394176"}, + {file = "geventhttpclient-2.0.12-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a6b4c9e1ade3ae090b7b679d5b691d0c87460612983d4ab951043f859adffb"}, + {file = "geventhttpclient-2.0.12-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a359605dab2b92df4ef1bab7f1bec26e82acdc4253828a508f55375af50b48"}, + {file = "geventhttpclient-2.0.12-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:fc17f57be8254329715702d00536a443c29b52f2ef750bc0650554fb3b7e33e7"}, + {file = "geventhttpclient-2.0.12-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b58096bcaaa259e8d107749539b1d3804fc6ec395e91dec8040d448d298861c8"}, + {file = "geventhttpclient-2.0.12-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb66bff9ed4d4f0bced3498746d86c949bf99e2440ceb968e6e7c542b3982b0"}, + {file = "geventhttpclient-2.0.12-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0158f45fd611b585c54380d981181c303313f3e059395310112805f53998d061"}, + {file = 
"geventhttpclient-2.0.12-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13129723ba3568f0a373cbd612130e2d78b3f284cf6a62385e26a92d7627a570"}, + {file = "geventhttpclient-2.0.12-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:571be0c875503ef5088cb417e84b707c922e3e2bd5e302e609d25e008cf037eb"}, + {file = "geventhttpclient-2.0.12-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:46e1706e3a44bb3423fc8d10b44e71c8a52c6535e22d483519dde008723c4f25"}, + {file = "geventhttpclient-2.0.12-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de259de7ccc19b47537e21b47a74442ad64d1a1a262b971713d6af8cc8f16f9"}, + {file = "geventhttpclient-2.0.12-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d777dced8a8e04fd8e0811c3b764d9a476b6a4c865f10079cc4f27b95b37196"}, + {file = "geventhttpclient-2.0.12-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcd4f45055a2e2f66e67016599d3fac33bc67b3bd67b672c1503a5de7543c1b6"}, + {file = "geventhttpclient-2.0.12-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:61b078cfc4b34a0d50224adf80c7eeae8e23fe6d8cb35926ccd3f3a6b86f921f"}, + {file = "geventhttpclient-2.0.12.tar.gz", hash = "sha256:ebea08e79c1aa7d03b43936b347c0f87356e6fb1c6845735a11f23c949c655f7"}, +] + +[package.dependencies] +brotli = "*" +certifi = "*" +gevent = ">=0.13" +six = "*" + +[[package]] +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" 
+pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "greenlet" +version = "3.0.3" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.7" +files = [ + {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, + {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, + {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, + {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, + {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, + {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, + {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, + {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, + {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, + {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, + {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, + {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, + {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, + {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, + {file = 
"greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, + {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, + {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, + {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, + {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, + {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, + {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, + {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, + {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, + {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, + {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, + {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, + {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, + {file = 
"greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, + {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, + {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, + {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpcio" +version = "1.62.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, + {file = "grpcio-1.62.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5"}, + {file = "grpcio-1.62.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3"}, + {file = "grpcio-1.62.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70"}, + {file = "grpcio-1.62.1-cp310-cp310-win32.whl", hash = "sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f"}, + {file = 
"grpcio-1.62.1-cp310-cp310-win_amd64.whl", hash = "sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66"}, + {file = "grpcio-1.62.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2"}, + {file = "grpcio-1.62.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660"}, + {file = "grpcio-1.62.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f"}, + {file = "grpcio-1.62.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db"}, + {file = "grpcio-1.62.1-cp311-cp311-win32.whl", hash = "sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c"}, + {file = "grpcio-1.62.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc"}, + {file = "grpcio-1.62.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b"}, + {file = "grpcio-1.62.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9"}, + {file = "grpcio-1.62.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b"}, + {file = "grpcio-1.62.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41"}, + {file = "grpcio-1.62.1-cp312-cp312-win32.whl", hash = "sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f"}, + {file = "grpcio-1.62.1-cp312-cp312-win_amd64.whl", hash = "sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d"}, + {file = "grpcio-1.62.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a"}, + {file = "grpcio-1.62.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1"}, + {file = "grpcio-1.62.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f"}, + {file = "grpcio-1.62.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7"}, + {file = "grpcio-1.62.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407"}, + 
{file = "grpcio-1.62.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362"}, + {file = "grpcio-1.62.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505"}, + {file = "grpcio-1.62.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49"}, + {file = "grpcio-1.62.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06"}, + {file = "grpcio-1.62.1-cp38-cp38-win32.whl", hash = "sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4"}, + {file = "grpcio-1.62.1-cp38-cp38-win_amd64.whl", hash = "sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b"}, + {file = "grpcio-1.62.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483"}, + {file = "grpcio-1.62.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369"}, + {file = "grpcio-1.62.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd"}, + {file = "grpcio-1.62.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585"}, + {file = "grpcio-1.62.1-cp39-cp39-win32.whl", hash = "sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4"}, + {file = "grpcio-1.62.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332"}, + {file = "grpcio-1.62.1.tar.gz", hash = "sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.62.1)"] + +[[package]] +name = "gunicorn" +version = "21.2.0" +description = "WSGI HTTP Server for UNIX" +optional = false +python-versions = ">=3.5" +files = [ + {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, + {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files 
= [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httptools" +version = "0.6.1" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, + {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, + {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, + {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, + {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, + {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, + {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, + {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, + {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, + {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, + {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, + {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, + {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, + {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, + {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, + {file = 
"httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, + {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, + {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, + {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, + {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, + {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, + {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, + {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +] + +[package.extras] +test = ["Cython (>=0.29.24,<0.30.0)"] + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next 
generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "huggingface-hub" +version = "0.22.2" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.22.2-py3-none-any.whl", hash = "sha256:3429e25f38ccb834d310804a3b711e7e4953db5a9e420cc147a5e194ca90fd17"}, + {file = "huggingface_hub-0.22.2.tar.gz", hash = "sha256:32e9a9a6843c92f253ff9ca16b9985def4d80a93fb357af5353f770ef74a81be"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", 
"pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "humanfriendly" +version = "10.0" +description = "Human friendly output for text interfaces using Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477"}, + {file = "humanfriendly-10.0.tar.gz", hash = "sha256:6b0b831ce8f15f7300721aa49829fc4e83921a9a301cc7f606be6686a2288ddc"}, +] + +[package.dependencies] +pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = 
"sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.0-py3-none-any.whl", hash = "sha256:d97503976bb81f40a193d41ee6570868479c69d5068651eb039c40d850c59d67"}, + {file = "importlib_metadata-7.0.0.tar.gz", hash = "sha256:7fc841f8b8332803464e5dc1c63a2e59121f46ca186c0e2e182e80bf8c1319f7"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "6.4.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"}, + {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", 
"sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.29.4" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.4-py3-none-any.whl", hash = "sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da"}, + {file = "ipykernel-6.29.4.tar.gz", hash = "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.23.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = 
"ipython-8.23.0-py3-none-any.whl", hash = "sha256:07232af52a5ba146dc3372c7bf52a0f890a23edf38d77caef8d53f9cdc2584c1"}, + {file = "ipython-8.23.0.tar.gz", hash = "sha256:7468edaf4f6de3e1b912e57f66c241e6fd3c7099f2ec2136e239e142e800274d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5.13.0" +typing-extensions = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jq" +version = "1.7.0" +description = "jq is a lightweight and flexible JSON processor." +optional = false +python-versions = ">=3.5" +files = [ + {file = "jq-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8fae014fa8b2704322a5baa39c112176d9acb71e22ebdb8e21c1c864ecff654"}, + {file = "jq-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:40fe068d1fdf2c712885b69be90ddb3e61bca3e4346ab3994641a4fbbeb7be82"}, + {file = "jq-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ec105a0057f2f922d195e1d75d4b0ae41c4b38655ead04d1a3a47988fcb1939"}, + {file = "jq-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38e2041ca578275334eff9e1d913ae386210345e5ae71cd9c16e3f208dc81deb"}, + {file = "jq-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce1df1b6fffeeeb265d4ea3397e9875ab170ba5a7af6b7997c2fd755934df065"}, + {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:05ebdaa868f068967d9e7cbf76e59e61fbdafa565dbc3579c387fb1f248592bb"}, + {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b3f916cb812fcd26bb1b006634d9c0eff240090196ca0ebb5d229b344f624e53"}, + {file = "jq-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ad7749a16a16bafd6cebafd5e40990b641b4b6b7b661326864677effc44a500"}, + {file = "jq-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e99ea17b708f55e8bed2f4f68c022119184b17eb15987b384db12e8b6702bd5"}, + {file = "jq-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:76735cd19de65c15964d330adbc2c84add8e55dea35ebfe17b9acf88a06a7d57"}, + {file = "jq-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6b841ddd9089429fc0621d07d1c34ff24f7d6a6245c10125b82806f61e36ae8"}, + {file = "jq-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d6b1fc2515b7be92195d50b68f82329cc0250c7fbca790b887d74902ba33870"}, + {file = "jq-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb6546a57a3ceeed41961be2f1417b4e7a5b3170cca7bb82f5974d2ba9acaab6"}, + {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3427ad0f377f188953958e36b76167c8d11b8c8c61575c22deafa4aba58d601f"}, + {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:79b9603219fa5082df97d265d71c426613286bd0e5378a8739ce39056fa1e2dc"}, + {file = "jq-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a2981a24765a747163e0daa23648372b72a006e727895b95d032632aa51094bd"}, + {file = "jq-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a0cc15b2ed511a1a8784c7c7dc07781e28d84a65934062de52487578732e0514"}, + {file = "jq-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:90032c2c4e710157d333d166818ede8b9c8ef0f697e59c9427304edc47146f3d"}, + {file = "jq-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e715d5f0bdfc0be0ff33cd0a3f6f51f8bc5ad464fab737e2048a1b46b45bb582"}, + {file = "jq-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76cc5a1ca3a540a5753dbd592f701c1ec7c9cc256becba604490283c055f3f1c"}, + {file = "jq-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:293b6e8e4b652d96fdeae7dd5ffb1644199d8b6fc1f95d528c16451925c0482e"}, + {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f103868b8902d4ee7f643248bdd7a2de9f9396e4b262f42745b9f624c834d07a"}, + {file = 
"jq-1.7.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e9c5ccfa3cf65f92b60c5805ef725f7cd799f2dc16e8601c6e8f12f38a9f48f3"}, + {file = "jq-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ca25608d51fdbf8bd5c682b433e1cb9f497155a7c1ea5901524df099f1ceff3"}, + {file = "jq-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6a2d34d962ce2da5136dab2664fc7efad9f71024d0dc328702f2dc70b4e2735c"}, + {file = "jq-1.7.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:757e8c4cb0cb1175f0aaa227f0a26e4765ba5da04d0bc875b0bd933eff6bd0a0"}, + {file = "jq-1.7.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d097098a628171b87961fb0400117ac340b1eb40cbbee2e58208c4254c23c20"}, + {file = "jq-1.7.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45bc842806d71bd5839c190a88fd071ac5a0a8a1dd601e83228494a19f14559c"}, + {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f0629743417f8709305d1f77d3929493912efdc3fd1cce3a7fcc76b81bc6b82d"}, + {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:9b9a49e8b14d3a368011ed1412c8c3e193a7135d5eb4310d77ee643470112b47"}, + {file = "jq-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a10e3f88b6d2bbb4c47b368f919ec7b648196bf9c60a5cc921d04239d68240c2"}, + {file = "jq-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aa85b47effb4152e1cf1120607f475a1c11395d072323ff23e8bb59ce6752713"}, + {file = "jq-1.7.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9413f67ea28037e37ccf8951f9f0b380f31d79162f33e216faa6bd0d8eca0dc7"}, + {file = "jq-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3daf3b3443c4e871c23ac1e698eb70d1225b46a4ac79c73968234adcd70f3ed8"}, + {file = "jq-1.7.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:dbe03f95ab02dc045691c3b5c7da8d8c2128e60450fb2124ea8b49034c74f158"}, + {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a6b2e9f4e63644a30726c58c25d80015f9b83325b125615a46e10d4439b9dc99"}, + {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9fffcffc8e56585223878edd7c5d719eb8547281d64af2bac43911f1bb9e7029"}, + {file = "jq-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:95d4bcd5a999ce0aaadaadcaca967989f0efc96c1097a81746b21b6126cf7aaf"}, + {file = "jq-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0137445eb67c43eb0eb46933aff7e8afbbd6c5aaf8574efd5df536dc9d177d1d"}, + {file = "jq-1.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee0e9307b6d4fe89a8556a92c1db65e0d66218bcc13fdeb92a09645a55ff87a"}, + {file = "jq-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e0f95cecb690df66f23a8d76c746d2ed15671de3f6101140e3fe2b98b97e0a8"}, + {file = "jq-1.7.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95e472aa54efe418d3627dcd2a369ac0b21e1a5e352550144fd5f0c40585a5b7"}, + {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4be2a2b56fa139f3235cdb8422ea16eccdd48d62bf91d9fac10761cd55d26c84"}, + {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7db8260ecb57827bb3fb6f44d4a6f0db0570ded990eee95a5fd3ac9ba14f60d7"}, + {file = "jq-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fdbb7ff2dfce2cc0f421f498dcb64176997bd9d9e6cab474e59577e7bff3090d"}, + {file = "jq-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:396bef4b4c9c1ebe3e0e04e287bc79a861b991e12db45681c398d3906ee85468"}, + {file = "jq-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18d8a81c6e241585a0bf748903082d65c4eaa6ba80248f507e5cebda36e05c6c"}, + {file = "jq-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ade00a39990fdfe0acc7d2a900e3e5e6b11a71eb5289954ff0df31ac0afae25b"}, + {file = "jq-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c777e88f3cce496c17f5c3bdbc7d74ff12b5cbdaea30f3a374f3cc92e5bba8d"}, + {file = "jq-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79957008c67d8f1d9134cd0e01044bff5d795f7e94db9532a9fe9212e1f88a77"}, + {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2bc5cb77dd12e861296cfa69587aa6797ccfee4f5f3aa571b02f0273ab1efec1"}, + {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8e10a5937aab9c383632ab151f73d43dc0c4be99f62221a7044988dc8ddd4bdc"}, + {file = "jq-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e6e13e0f8d3204aefe861159160116e822c90bae773a3ccdd4d9e79a06e086e"}, + {file = "jq-1.7.0-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0cdbd32463ef632b0b4ca6dab434e2387342bc5c895b411ec6b2a14bbf4b2c12"}, + {file = "jq-1.7.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:558a5c6b4430e05fa59c4b5631c0d3fc0f163100390c03edc1993663f59d8a9b"}, + {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bbf77138cdd8d306bf335d998525a0477e4cb6f00eb6f361288f5b82274e84c"}, + {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e6919481ff43754ae9b17a98c877995d5e1346be114c71cd0dfd8ff7d0cd60"}, + {file = "jq-1.7.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b0584ff33b2a9cc021edec325af4e0fa9fbd54cce80c1f7b8e0ba4cf2d75508"}, + {file = "jq-1.7.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6e7259880ab7e75e845fb4d56c6d18922c68789d25d7cdbb6f433d9e714613a"}, + {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2d472cdd0bcb3d47c87b00ff841edff41c79fe4422523c4a7c8bf913fb950f7f"}, + {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a3430de179f8a7b0baf5675d5ee400f97344085d79f190a90fc0c7df990cbcc"}, + {file = "jq-1.7.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb375bdb2a44f1a643123b8ec57563bb5542673f0399799ab5662ce90bf4a5"}, + {file = "jq-1.7.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39a0c71ed2f1ec0462d54678333f1b14d9f25fd62a9f46df140d68552f79d204"}, + {file = "jq-1.7.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:306c1e3ba531d7dc3284e128689f0b75409a4e8e8a3bdac2c51cc26f2d3cca58"}, + {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88b8b0cc838c7387dc5e8c45b192c7504acd0510514658d2d5cd1716fcf15fe3"}, + {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c75e16e542f4abaae25727b9fc4eeaf69cb07122be8a2a7672d02feb3a1cc9a"}, + {file = "jq-1.7.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4828ac689a67fd9c021796bcacd95811bab806939dd6316eb0c2d3de016c584"}, + {file = "jq-1.7.0-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:c94f95b27720d2db7f1039fdd371f70bc0cac8e204cbfd0626176d7b8a3053d6"}, + {file = "jq-1.7.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d5ff445fc9b1eb4623a914e04bea9511e654e9143cde82b039383af4f7dc36f2"}, + {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07e369ff021fad38a29d6a7a3fc24f7d313e9a239b15ce4eefaffee637466400"}, + {file = "jq-1.7.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553dfbf674069cb20533d7d74cd8a9d7982bab8e4a5b473fde105d99278df09f"}, + {file = 
"jq-1.7.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9fbc76f6fec66e5e58cc84f20a5de80addd3c64ad87a748f5c5f6b4ef01bc8c"}, + {file = "jq-1.7.0.tar.gz", hash = "sha256:f460d1f2c3791617e4fb339fa24efbdbebe672b02c861f057358553642047040"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jupyter-client" +version = "8.6.1" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, + {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, +] + +[package.dependencies] +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", 
"pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "kubernetes" +version = "29.0.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-29.0.0-py2.py3-none-any.whl", hash = "sha256:ab8cb0e0576ccdfb71886366efb102c6a20f268d817be065ce7f9909c631e43e"}, + {file = "kubernetes-29.0.0.tar.gz", hash = "sha256:c4812e227ae74d07d53c88293e564e54b850452715a59a927e7e1bc6b9a60459"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "langchain" +version = "0.1.14" +description = 
"Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain-0.1.14-py3-none-any.whl", hash = "sha256:94f9b5df2421faaf762d4f43b9d65c270c2f701934580d281e4c6226deef7234"}, + {file = "langchain-0.1.14.tar.gz", hash = "sha256:124c6244cf3957616b98f2df07dc2992fc40dff6ed1a62d8ee8a40f1e0260a40"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""} +dataclasses-json = ">=0.5.7,<0.7" +jsonpatch = ">=1.33,<2.0" +langchain-community = ">=0.0.30,<0.1" +langchain-core = ">=0.1.37,<0.2.0" +langchain-text-splitters = ">=0.0.1,<0.1" +langsmith = ">=0.1.17,<0.2.0" +numpy = ">=1,<2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-textanalytics (>=5.3.0,<6.0.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (<2)"] +clarifai = ["clarifai (>=9.1.0)"] +cli = ["typer (>=0.9.0,<0.10.0)"] +cohere = ["cohere (>=4,<6)"] +docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] +embeddings = ["sentence-transformers (>=2,<3)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<6)", "couchbase (>=4.1.9,<5.0.0)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai 
(>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "langchain-openai (>=0.0.2,<0.1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"] +javascript = ["esprima (>=4.0.1,<5.0.0)"] +llms = ["clarifai (>=9.1.0)", "cohere (>=4,<6)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"] +openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"] +qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] +text-helpers = ["chardet (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langchain-anthropic" +version = "0.1.4" +description = "An integration package connecting AnthropicMessages and LangChain" +optional = false 
+python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_anthropic-0.1.4-py3-none-any.whl", hash = "sha256:9b3e28c1c0f7a502495b240c6c015d7fc57d04fb381fae389ecdce8847de5777"}, + {file = "langchain_anthropic-0.1.4.tar.gz", hash = "sha256:d772f7111335953d23393cac8173a0a1ee65b5fe0dc137c6b7a6db2a06fbcac4"}, +] + +[package.dependencies] +anthropic = ">=0.17.0,<1" +defusedxml = ">=0.7.1,<0.8.0" +langchain-core = ">=0.1,<0.2" + +[[package]] +name = "langchain-astradb" +version = "0.1.0" +description = "An integration package connecting Astra DB and LangChain" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_astradb-0.1.0-py3-none-any.whl", hash = "sha256:c6686089da343fce8c31e36c9162323e88888300b09d56b72347a19449d7361f"}, + {file = "langchain_astradb-0.1.0.tar.gz", hash = "sha256:c8a3426c9daa2beeec2dc7a718186b0b9c388082e9543e0bc07363712cc3b947"}, +] + +[package.dependencies] +astrapy = ">=0.7.7,<0.8.0" +langchain-core = ">=0.1.31,<0.2.0" +numpy = ">=1,<2" + +[[package]] +name = "langchain-community" +version = "0.0.31" +description = "Community contributed LangChain integrations." 
+optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_community-0.0.31-py3-none-any.whl", hash = "sha256:905c01b978a1cef7fdcddd2d9241dedc9987db6f23ba1b58d974e38b1cdf2775"}, + {file = "langchain_community-0.0.31.tar.gz", hash = "sha256:9a970bc2bb59bb4c204b696d8c62c2534f6ddb31005005cc1b7d7f934e58a5fc"}, +] + +[package.dependencies] +aiohttp = ">=3.8.3,<4.0.0" +dataclasses-json = ">=0.5.7,<0.7" +langchain-core = ">=0.1.37,<0.2.0" +langsmith = ">=0.1.0,<0.2.0" +numpy = ">=1,<2" +PyYAML = ">=5.3" +requests = ">=2,<3" +SQLAlchemy = ">=1.4,<3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +cli = ["typer (>=0.9.0,<0.10.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cloudpickle (>=2.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "friendli-client (>=1.2.4,<2.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.3,<6.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", 
"nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "premai (>=0.3.25,<0.4.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tidb-vector (>=0.0.3,<1.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "vdms (>=0.0.20,<0.0.21)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] + +[[package]] +name = "langchain-core" +version = "0.1.40" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.40-py3-none-any.whl", hash = "sha256:618dbb7ab44d8b263b91e384db1ff07d0db256ae5bdafa0123a115b6a75a13f1"}, + {file = "langchain_core-0.1.40.tar.gz", hash = "sha256:34c06fc0e6d3534b738c63f85403446b4be71161665b7e091f9bb19c914ec100"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langchain-experimental" +version = "0.0.56" +description = "Building applications with LLMs through composability" +optional = false +python-versions 
= "<4.0,>=3.8.1" +files = [ + {file = "langchain_experimental-0.0.56-py3-none-any.whl", hash = "sha256:91fd7a723b0ef3193a63726745523efdd5dd7134116d838c312cfdbf4b354298"}, + {file = "langchain_experimental-0.0.56.tar.gz", hash = "sha256:ebb1c34815739d3af50c9b709c57b91d0357d567ad2042acb724853c6ba1d735"}, +] + +[package.dependencies] +langchain = ">=0.1.14,<0.2.0" +langchain-core = ">=0.1.37,<0.2.0" + +[package.extras] +extended-testing = ["faker (>=19.3.1,<20.0.0)", "jinja2 (>=3,<4)", "pandas (>=2.0.1,<3.0.0)", "presidio-analyzer (>=2.2.352,<3.0.0)", "presidio-anonymizer (>=2.2.352,<3.0.0)", "sentence-transformers (>=2,<3)", "tabulate (>=0.9.0,<0.10.0)", "vowpal-wabbit-next (==0.6.0)"] + +[[package]] +name = "langchain-openai" +version = "0.1.1" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_openai-0.1.1-py3-none-any.whl", hash = "sha256:5cf4df5d2550af673337eafedaeec014ba52f9a25aeb8451206ca254bed01e5c"}, + {file = "langchain_openai-0.1.1.tar.gz", hash = "sha256:d10e9a9fc4c8ea99ca98f23808ce44c7dcdd65354ac07ad10afe874ecf3401ca"}, +] + +[package.dependencies] +langchain-core = ">=0.1.33,<0.2.0" +openai = ">=1.10.0,<2.0.0" +tiktoken = ">=0.5.2,<1" + +[[package]] +name = "langchain-text-splitters" +version = "0.0.1" +description = "LangChain text splitting utilities" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_text_splitters-0.0.1-py3-none-any.whl", hash = "sha256:f5b802f873f5ff6a8b9259ff34d53ed989666ef4e1582e6d1adb3b5520e3839a"}, + {file = "langchain_text_splitters-0.0.1.tar.gz", hash = "sha256:ac459fa98799f5117ad5425a9330b21961321e30bc19a2a2f9f761ddadd62aa1"}, +] + +[package.dependencies] +langchain-core = ">=0.1.28,<0.2.0" + +[package.extras] +extended-testing = ["lxml (>=5.1.0,<6.0.0)"] + +[[package]] +name = "langsmith" +version = "0.1.39" +description = "Client library to connect to the LangSmith LLM Tracing and 
Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.39-py3-none-any.whl", hash = "sha256:85c19177162585728001cb7ae91ab48ca4abe39b7bc1ff783212ac426ded222b"}, + {file = "langsmith-0.1.39.tar.gz", hash = "sha256:2aec9d2f9cc664042d2121b13da569b0902aff842c86b17b440245d57da84ec5"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "locust" +version = "2.24.1" +description = "Developer friendly load testing framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "locust-2.24.1-py3-none-any.whl", hash = "sha256:7f6ed4dc289aad66c304582e6d25e4de5d7c3b175b580332442ab2be35b9d916"}, + {file = "locust-2.24.1.tar.gz", hash = "sha256:094161d44d94839bf1120fd7898b7abb9c143833743ba7c096beb470a236b9a7"}, +] + +[package.dependencies] +ConfigArgParse = ">=1.5.5" +flask = ">=2.0.0" +Flask-Cors = ">=3.0.10" +Flask-Login = ">=0.6.3" +gevent = ">=22.10.2" +geventhttpclient = ">=2.0.11" +msgpack = ">=1.0.0" +psutil = ">=5.9.1" +pywin32 = {version = "*", markers = "platform_system == \"Windows\""} +pyzmq = ">=25.0.0" +requests = ">=2.26.0" +roundrobin = ">=0.0.2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +Werkzeug = ">=2.0.0" + +[[package]] +name = "loguru" +version = "0.7.2" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", 
"freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] + +[[package]] +name = "lxml" +version = "5.2.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1f7785f4f789fdb522729ae465adcaa099e2a3441519df750ebdccc481d961a1"}, + {file = "lxml-5.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6cc6ee342fb7fa2471bd9b6d6fdfc78925a697bf5c2bcd0a302e98b0d35bfad3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:794f04eec78f1d0e35d9e0c36cbbb22e42d370dda1609fb03bcd7aeb458c6377"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817d420c60a5183953c783b0547d9eb43b7b344a2c46f69513d5952a78cddf3"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2213afee476546a7f37c7a9b4ad4d74b1e112a6fafffc9185d6d21f043128c81"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b070bbe8d3f0f6147689bed981d19bbb33070225373338df755a46893528104a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e02c5175f63effbd7c5e590399c118d5db6183bbfe8e0d118bdb5c2d1b48d937"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:3dc773b2861b37b41a6136e0b72a1a44689a9c4c101e0cddb6b854016acc0aa8"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = 
"sha256:d7520db34088c96cc0e0a3ad51a4fd5b401f279ee112aa2b7f8f976d8582606d"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:bcbf4af004f98793a95355980764b3d80d47117678118a44a80b721c9913436a"}, + {file = "lxml-5.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2b44bec7adf3e9305ce6cbfa47a4395667e744097faed97abb4728748ba7d47"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1c5bb205e9212d0ebddf946bc07e73fa245c864a5f90f341d11ce7b0b854475d"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2c9d147f754b1b0e723e6afb7ba1566ecb162fe4ea657f53d2139bbf894d050a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3545039fa4779be2df51d6395e91a810f57122290864918b172d5dc7ca5bb433"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a91481dbcddf1736c98a80b122afa0f7296eeb80b72344d7f45dc9f781551f56"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2ddfe41ddc81f29a4c44c8ce239eda5ade4e7fc305fb7311759dd6229a080052"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a7baf9ffc238e4bf401299f50e971a45bfcc10a785522541a6e3179c83eabf0a"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31e9a882013c2f6bd2f2c974241bf4ba68c85eba943648ce88936d23209a2e01"}, + {file = "lxml-5.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0a15438253b34e6362b2dc41475e7f80de76320f335e70c5528b7148cac253a1"}, + {file = "lxml-5.2.1-cp310-cp310-win32.whl", hash = "sha256:6992030d43b916407c9aa52e9673612ff39a575523c5f4cf72cdef75365709a5"}, + {file = "lxml-5.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:da052e7962ea2d5e5ef5bc0355d55007407087392cf465b7ad84ce5f3e25fe0f"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:70ac664a48aa64e5e635ae5566f5227f2ab7f66a3990d67566d9907edcbbf867"}, + {file = "lxml-5.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1ae67b4e737cddc96c99461d2f75d218bdf7a0c3d3ad5604d1f5e7464a2f9ffe"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f18a5a84e16886898e51ab4b1d43acb3083c39b14c8caeb3589aabff0ee0b270"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6f2c8372b98208ce609c9e1d707f6918cc118fea4e2c754c9f0812c04ca116d"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:394ed3924d7a01b5bd9a0d9d946136e1c2f7b3dc337196d99e61740ed4bc6fe1"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d077bc40a1fe984e1a9931e801e42959a1e6598edc8a3223b061d30fbd26bbc"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:764b521b75701f60683500d8621841bec41a65eb739b8466000c6fdbc256c240"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:3a6b45da02336895da82b9d472cd274b22dc27a5cea1d4b793874eead23dd14f"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:5ea7b6766ac2dfe4bcac8b8595107665a18ef01f8c8343f00710b85096d1b53a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:e196a4ff48310ba62e53a8e0f97ca2bca83cdd2fe2934d8b5cb0df0a841b193a"}, + {file = "lxml-5.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:200e63525948e325d6a13a76ba2911f927ad399ef64f57898cf7c74e69b71095"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dae0ed02f6b075426accbf6b2863c3d0a7eacc1b41fb40f2251d931e50188dad"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab31a88a651039a07a3ae327d68ebdd8bc589b16938c09ef3f32a4b809dc96ef"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df2e6f546c4df14bc81f9498bbc007fbb87669f1bb707c6138878c46b06f6510"}, + {file = 
"lxml-5.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5dd1537e7cc06efd81371f5d1a992bd5ab156b2b4f88834ca852de4a8ea523fa"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9b9ec9c9978b708d488bec36b9e4c94d88fd12ccac3e62134a9d17ddba910ea9"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8e77c69d5892cb5ba71703c4057091e31ccf534bd7f129307a4d084d90d014b8"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d5c70e04aac1eda5c829a26d1f75c6e5286c74743133d9f742cda8e53b9c2f"}, + {file = "lxml-5.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c94e75445b00319c1fad60f3c98b09cd63fe1134a8a953dcd48989ef42318534"}, + {file = "lxml-5.2.1-cp311-cp311-win32.whl", hash = "sha256:4951e4f7a5680a2db62f7f4ab2f84617674d36d2d76a729b9a8be4b59b3659be"}, + {file = "lxml-5.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:5c670c0406bdc845b474b680b9a5456c561c65cf366f8db5a60154088c92d102"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abc25c3cab9ec7fcd299b9bcb3b8d4a1231877e425c650fa1c7576c5107ab851"}, + {file = "lxml-5.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6935bbf153f9a965f1e07c2649c0849d29832487c52bb4a5c5066031d8b44fd5"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d793bebb202a6000390a5390078e945bbb49855c29c7e4d56a85901326c3b5d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afd5562927cdef7c4f5550374acbc117fd4ecc05b5007bdfa57cc5355864e0a4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0e7259016bc4345a31af861fdce942b77c99049d6c2107ca07dc2bba2435c1d9"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:530e7c04f72002d2f334d5257c8a51bf409db0316feee7c87e4385043be136af"}, + {file = 
"lxml-5.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59689a75ba8d7ffca577aefd017d08d659d86ad4585ccc73e43edbfc7476781a"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f9737bf36262046213a28e789cc82d82c6ef19c85a0cf05e75c670a33342ac2c"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:3a74c4f27167cb95c1d4af1c0b59e88b7f3e0182138db2501c353555f7ec57f4"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:68a2610dbe138fa8c5826b3f6d98a7cfc29707b850ddcc3e21910a6fe51f6ca0"}, + {file = "lxml-5.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f0a1bc63a465b6d72569a9bba9f2ef0334c4e03958e043da1920299100bc7c08"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c2d35a1d047efd68027817b32ab1586c1169e60ca02c65d428ae815b593e65d4"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:79bd05260359170f78b181b59ce871673ed01ba048deef4bf49a36ab3e72e80b"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:865bad62df277c04beed9478fe665b9ef63eb28fe026d5dedcb89b537d2e2ea6"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:44f6c7caff88d988db017b9b0e4ab04934f11e3e72d478031efc7edcac6c622f"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:71e97313406ccf55d32cc98a533ee05c61e15d11b99215b237346171c179c0b0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:057cdc6b86ab732cf361f8b4d8af87cf195a1f6dc5b0ff3de2dced242c2015e0"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3bbbc998d42f8e561f347e798b85513ba4da324c2b3f9b7969e9c45b10f6169"}, + {file = "lxml-5.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:491755202eb21a5e350dae00c6d9a17247769c64dcf62d8c788b5c135e179dc4"}, + {file = "lxml-5.2.1-cp312-cp312-win32.whl", hash = 
"sha256:8de8f9d6caa7f25b204fc861718815d41cbcf27ee8f028c89c882a0cf4ae4134"}, + {file = "lxml-5.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:f2a9efc53d5b714b8df2b4b3e992accf8ce5bbdfe544d74d5c6766c9e1146a3a"}, + {file = "lxml-5.2.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:70a9768e1b9d79edca17890175ba915654ee1725975d69ab64813dd785a2bd5c"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38d7b9a690b090de999835f0443d8aa93ce5f2064035dfc48f27f02b4afc3d0"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5670fb70a828663cc37552a2a85bf2ac38475572b0e9b91283dc09efb52c41d1"}, + {file = "lxml-5.2.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:958244ad566c3ffc385f47dddde4145088a0ab893504b54b52c041987a8c1863"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2a66bf12fbd4666dd023b6f51223aed3d9f3b40fef06ce404cb75bafd3d89536"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:9123716666e25b7b71c4e1789ec829ed18663152008b58544d95b008ed9e21e9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:0c3f67e2aeda739d1cc0b1102c9a9129f7dc83901226cc24dd72ba275ced4218"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5d5792e9b3fb8d16a19f46aa8208987cfeafe082363ee2745ea8b643d9cc5b45"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:88e22fc0a6684337d25c994381ed8a1580a6f5ebebd5ad41f89f663ff4ec2885"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:21c2e6b09565ba5b45ae161b438e033a86ad1736b8c838c766146eff8ceffff9"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_s390x.whl", hash = "sha256:afbbdb120d1e78d2ba8064a68058001b871154cc57787031b645c9142b937a62"}, + {file = "lxml-5.2.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = 
"sha256:627402ad8dea044dde2eccde4370560a2b750ef894c9578e1d4f8ffd54000461"}, + {file = "lxml-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:e89580a581bf478d8dcb97d9cd011d567768e8bc4095f8557b21c4d4c5fea7d0"}, + {file = "lxml-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:59565f10607c244bc4c05c0c5fa0c190c990996e0c719d05deec7030c2aa8289"}, + {file = "lxml-5.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:857500f88b17a6479202ff5fe5f580fc3404922cd02ab3716197adf1ef628029"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56c22432809085b3f3ae04e6e7bdd36883d7258fcd90e53ba7b2e463efc7a6af"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a55ee573116ba208932e2d1a037cc4b10d2c1cb264ced2184d00b18ce585b2c0"}, + {file = "lxml-5.2.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:6cf58416653c5901e12624e4013708b6e11142956e7f35e7a83f1ab02f3fe456"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:64c2baa7774bc22dd4474248ba16fe1a7f611c13ac6123408694d4cc93d66dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:74b28c6334cca4dd704e8004cba1955af0b778cf449142e581e404bd211fb619"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7221d49259aa1e5a8f00d3d28b1e0b76031655ca74bb287123ef56c3db92f213"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3dbe858ee582cbb2c6294dc85f55b5f19c918c2597855e950f34b660f1a5ede6"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:04ab5415bf6c86e0518d57240a96c4d1fcfc3cb370bb2ac2a732b67f579e5a04"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:6ab833e4735a7e5533711a6ea2df26459b96f9eec36d23f74cafe03631647c41"}, + {file = "lxml-5.2.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f443cdef978430887ed55112b491f670bba6462cea7a7742ff8f14b7abb98d75"}, + {file = 
"lxml-5.2.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9e2addd2d1866fe112bc6f80117bcc6bc25191c5ed1bfbcf9f1386a884252ae8"}, + {file = "lxml-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:f51969bac61441fd31f028d7b3b45962f3ecebf691a510495e5d2cd8c8092dbd"}, + {file = "lxml-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:b0b58fbfa1bf7367dde8a557994e3b1637294be6cf2169810375caf8571a085c"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3e183c6e3298a2ed5af9d7a356ea823bccaab4ec2349dc9ed83999fd289d14d5"}, + {file = "lxml-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:804f74efe22b6a227306dd890eecc4f8c59ff25ca35f1f14e7482bbce96ef10b"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08802f0c56ed150cc6885ae0788a321b73505d2263ee56dad84d200cab11c07a"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8c09ed18ecb4ebf23e02b8e7a22a05d6411911e6fabef3a36e4f371f4f2585"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3d30321949861404323c50aebeb1943461a67cd51d4200ab02babc58bd06a86"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:b560e3aa4b1d49e0e6c847d72665384db35b2f5d45f8e6a5c0072e0283430533"}, + {file = "lxml-5.2.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:058a1308914f20784c9f4674036527e7c04f7be6fb60f5d61353545aa7fcb739"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:adfb84ca6b87e06bc6b146dc7da7623395db1e31621c4785ad0658c5028b37d7"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:417d14450f06d51f363e41cace6488519038f940676ce9664b34ebf5653433a5"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a2dfe7e2473f9b59496247aad6e23b405ddf2e12ef0765677b0081c02d6c2c0b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:bf2e2458345d9bffb0d9ec16557d8858c9c88d2d11fed53998512504cd9df49b"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:58278b29cb89f3e43ff3e0c756abbd1518f3ee6adad9e35b51fb101c1c1daaec"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:64641a6068a16201366476731301441ce93457eb8452056f570133a6ceb15fca"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:78bfa756eab503673991bdcf464917ef7845a964903d3302c5f68417ecdc948c"}, + {file = "lxml-5.2.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11a04306fcba10cd9637e669fd73aa274c1c09ca64af79c041aa820ea992b637"}, + {file = "lxml-5.2.1-cp38-cp38-win32.whl", hash = "sha256:66bc5eb8a323ed9894f8fa0ee6cb3e3fb2403d99aee635078fd19a8bc7a5a5da"}, + {file = "lxml-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:9676bfc686fa6a3fa10cd4ae6b76cae8be26eb5ec6811d2a325636c460da1806"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cf22b41fdae514ee2f1691b6c3cdeae666d8b7fa9434de445f12bbeee0cf48dd"}, + {file = "lxml-5.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec42088248c596dbd61d4ae8a5b004f97a4d91a9fd286f632e42e60b706718d7"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd53553ddad4a9c2f1f022756ae64abe16da1feb497edf4d9f87f99ec7cf86bd"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feaa45c0eae424d3e90d78823f3828e7dc42a42f21ed420db98da2c4ecf0a2cb"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddc678fb4c7e30cf830a2b5a8d869538bc55b28d6c68544d09c7d0d8f17694dc"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:853e074d4931dbcba7480d4dcab23d5c56bd9607f92825ab80ee2bd916edea53"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cc4691d60512798304acb9207987e7b2b7c44627ea88b9d77489bbe3e6cc3bd4"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:beb72935a941965c52990f3a32d7f07ce869fe21c6af8b34bf6a277b33a345d3"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:6588c459c5627fefa30139be4d2e28a2c2a1d0d1c265aad2ba1935a7863a4913"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:588008b8497667f1ddca7c99f2f85ce8511f8f7871b4a06ceede68ab62dff64b"}, + {file = "lxml-5.2.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6787b643356111dfd4032b5bffe26d2f8331556ecb79e15dacb9275da02866e"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7c17b64b0a6ef4e5affae6a3724010a7a66bda48a62cfe0674dabd46642e8b54"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:27aa20d45c2e0b8cd05da6d4759649170e8dfc4f4e5ef33a34d06f2d79075d57"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d4f2cc7060dc3646632d7f15fe68e2fa98f58e35dd5666cd525f3b35d3fed7f8"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff46d772d5f6f73564979cd77a4fffe55c916a05f3cb70e7c9c0590059fb29ef"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:96323338e6c14e958d775700ec8a88346014a85e5de73ac7967db0367582049b"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:52421b41ac99e9d91934e4d0d0fe7da9f02bfa7536bb4431b4c05c906c8c6919"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7a7efd5b6d3e30d81ec68ab8a88252d7c7c6f13aaa875009fe3097eb4e30b84c"}, + {file = "lxml-5.2.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ed777c1e8c99b63037b91f9d73a6aad20fd035d77ac84afcc205225f8f41188"}, + {file = "lxml-5.2.1-cp39-cp39-win32.whl", hash = "sha256:644df54d729ef810dcd0f7732e50e5ad1bd0a135278ed8d6bcb06f33b6b6f708"}, + {file = "lxml-5.2.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:9ca66b8e90daca431b7ca1408cae085d025326570e57749695d6a01454790e95"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b0ff53900566bc6325ecde9181d89afadc59c5ffa39bddf084aaedfe3b06a11"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd6037392f2d57793ab98d9e26798f44b8b4da2f2464388588f48ac52c489ea1"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9c07e7a45bb64e21df4b6aa623cb8ba214dfb47d2027d90eac197329bb5e94"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3249cc2989d9090eeac5467e50e9ec2d40704fea9ab72f36b034ea34ee65ca98"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f42038016852ae51b4088b2862126535cc4fc85802bfe30dea3500fdfaf1864e"}, + {file = "lxml-5.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:533658f8fbf056b70e434dff7e7aa611bcacb33e01f75de7f821810e48d1bb66"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:622020d4521e22fb371e15f580d153134bfb68d6a429d1342a25f051ec72df1c"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efa7b51824aa0ee957ccd5a741c73e6851de55f40d807f08069eb4c5a26b2baa"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c6ad0fbf105f6bcc9300c00010a2ffa44ea6f555df1a2ad95c88f5656104817"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e233db59c8f76630c512ab4a4daf5a5986da5c3d5b44b8e9fc742f2a24dbd460"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a014510830df1475176466b6087fc0c08b47a36714823e58d8b8d7709132a96"}, + {file = "lxml-5.2.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d38c8f50ecf57f0463399569aa388b232cf1a2ffb8f0a9a5412d0db57e054860"}, + {file = 
"lxml-5.2.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5aea8212fb823e006b995c4dda533edcf98a893d941f173f6c9506126188860d"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff097ae562e637409b429a7ac958a20aab237a0378c42dabaa1e3abf2f896e5f"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5d65c39f16717a47c36c756af0fb36144069c4718824b7533f803ecdf91138"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3d0c3dd24bb4605439bf91068598d00c6370684f8de4a67c2992683f6c309d6b"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e32be23d538753a8adb6c85bd539f5fd3b15cb987404327c569dfc5fd8366e85"}, + {file = "lxml-5.2.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cc518cea79fd1e2f6c90baafa28906d4309d24f3a63e801d855e7424c5b34144"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a0af35bd8ebf84888373630f73f24e86bf016642fb8576fba49d3d6b560b7cbc"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8aca2e3a72f37bfc7b14ba96d4056244001ddcc18382bd0daa087fd2e68a354"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ca1e8188b26a819387b29c3895c47a5e618708fe6f787f3b1a471de2c4a94d9"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c8ba129e6d3b0136a0f50345b2cb3db53f6bda5dd8c7f5d83fbccba97fb5dcb5"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e998e304036198b4f6914e6a1e2b6f925208a20e2042563d9734881150c6c246"}, + {file = "lxml-5.2.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d3be9b2076112e51b323bdf6d5a7f8a798de55fb8d95fcb64bd179460cdc0704"}, + {file = "lxml-5.2.1.tar.gz", hash = "sha256:3f7765e69bbce0906a7c74d5fe46d2c7a7596147318dbc08e4a2431f3060e306"}, +] + +[package.extras] +cssselect = 
["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.10)"] + +[[package]] +name = "mako" +version = "1.3.2" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Mako-1.3.2-py3-none-any.whl", hash = "sha256:32a99d70754dfce237019d17ffe4a282d2d3351b9c476e90d8a60e63f133b80c"}, + {file = "Mako-1.3.2.tar.gz", hash = "sha256:2a0c8ad7f6274271b3bb7467dd37cf9cc6dab4bc19cb69a4ef10669402de698e"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "marshmallow" +version = "3.21.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.1-py3-none-any.whl", hash = "sha256:f085493f79efb0644f270a9bf2892843142d80d7174bbbd2f3713f2a589dc633"}, + {file = "marshmallow-3.21.1.tar.gz", hash = "sha256:4e65e9e0d80fc9e609574b9983cf32579f305c718afb30d7233ab818571768c3"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==4.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mmh3" +version = "4.1.0" +description = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
+optional = false +python-versions = "*" +files = [ + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be5ac76a8b0cd8095784e51e4c1c9c318c19edcd1709a06eb14979c8d850c31a"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98a49121afdfab67cd80e912b36404139d7deceb6773a83620137aaa0da5714c"}, + {file = "mmh3-4.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5259ac0535874366e7d1a5423ef746e0d36a9e3c14509ce6511614bdc5a7ef5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5950827ca0453a2be357696da509ab39646044e3fa15cad364eb65d78797437"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dd0f652ae99585b9dd26de458e5f08571522f0402155809fd1dc8852a613a39"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d25548070942fab1e4a6f04d1626d67e66d0b81ed6571ecfca511f3edf07e6"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53db8d9bad3cb66c8f35cbc894f336273f63489ce4ac416634932e3cbe79eb5b"}, + {file = "mmh3-4.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75da0f615eb55295a437264cc0b736753f830b09d102aa4c2a7d719bc445ec05"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b926b07fd678ea84b3a2afc1fa22ce50aeb627839c44382f3d0291e945621e1a"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c5b053334f9b0af8559d6da9dc72cef0a65b325ebb3e630c680012323c950bb6"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bf33dc43cd6de2cb86e0aa73a1cc6530f557854bbbe5d59f41ef6de2e353d7b"}, + {file = "mmh3-4.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fa7eacd2b830727ba3dd65a365bed8a5c992ecd0c8348cf39a05cc77d22f4970"}, + {file = 
"mmh3-4.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:42dfd6742b9e3eec599f85270617debfa0bbb913c545bb980c8a4fa7b2d047da"}, + {file = "mmh3-4.1.0-cp310-cp310-win32.whl", hash = "sha256:2974ad343f0d39dcc88e93ee6afa96cedc35a9883bc067febd7ff736e207fa47"}, + {file = "mmh3-4.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:74699a8984ded645c1a24d6078351a056f5a5f1fe5838870412a68ac5e28d865"}, + {file = "mmh3-4.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f0dc874cedc23d46fc488a987faa6ad08ffa79e44fb08e3cd4d4cf2877c00a00"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3280a463855b0eae64b681cd5b9ddd9464b73f81151e87bb7c91a811d25619e6"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97ac57c6c3301769e757d444fa7c973ceb002cb66534b39cbab5e38de61cd896"}, + {file = "mmh3-4.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b6502cdb4dbd880244818ab363c8770a48cdccecf6d729ade0241b736b5ec0"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52ba2da04671a9621580ddabf72f06f0e72c1c9c3b7b608849b58b11080d8f14"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a5fef4c4ecc782e6e43fbeab09cff1bac82c998a1773d3a5ee6a3605cde343e"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5135358a7e00991f73b88cdc8eda5203bf9de22120d10a834c5761dbeb07dd13"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cff9ae76a54f7c6fe0167c9c4028c12c1f6de52d68a31d11b6790bb2ae685560"}, + {file = "mmh3-4.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f02576a4d106d7830ca90278868bf0983554dd69183b7bbe09f2fcd51cf54f"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:073d57425a23721730d3ff5485e2da489dd3c90b04e86243dd7211f889898106"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:71e32ddec7f573a1a0feb8d2cf2af474c50ec21e7a8263026e8d3b4b629805db"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7cbb20b29d57e76a58b40fd8b13a9130db495a12d678d651b459bf61c0714cea"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a42ad267e131d7847076bb7e31050f6c4378cd38e8f1bf7a0edd32f30224d5c9"}, + {file = "mmh3-4.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a013979fc9390abadc445ea2527426a0e7a4495c19b74589204f9b71bcaafeb"}, + {file = "mmh3-4.1.0-cp311-cp311-win32.whl", hash = "sha256:1d3b1cdad7c71b7b88966301789a478af142bddcb3a2bee563f7a7d40519a00f"}, + {file = "mmh3-4.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dc6dc32eb03727467da8e17deffe004fbb65e8b5ee2b502d36250d7a3f4e2ec"}, + {file = "mmh3-4.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9ae3a5c1b32dda121c7dc26f9597ef7b01b4c56a98319a7fe86c35b8bc459ae6"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0033d60c7939168ef65ddc396611077a7268bde024f2c23bdc283a19123f9e9c"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6af3e2287644b2b08b5924ed3a88c97b87b44ad08e79ca9f93d3470a54a41c5"}, + {file = "mmh3-4.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d82eb4defa245e02bb0b0dc4f1e7ee284f8d212633389c91f7fba99ba993f0a2"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba245e94b8d54765e14c2d7b6214e832557e7856d5183bc522e17884cab2f45d"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb04e2feeabaad6231e89cd43b3d01a4403579aa792c9ab6fdeef45cc58d4ec0"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e3b1a27def545ce11e36158ba5d5390cdbc300cfe456a942cc89d649cf7e3b2"}, + {file = 
"mmh3-4.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce0ab79ff736d7044e5e9b3bfe73958a55f79a4ae672e6213e92492ad5e734d5"}, + {file = "mmh3-4.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b02268be6e0a8eeb8a924d7db85f28e47344f35c438c1e149878bb1c47b1cd3"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:deb887f5fcdaf57cf646b1e062d56b06ef2f23421c80885fce18b37143cba828"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99dd564e9e2b512eb117bd0cbf0f79a50c45d961c2a02402787d581cec5448d5"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:08373082dfaa38fe97aa78753d1efd21a1969e51079056ff552e687764eafdfe"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:54b9c6a2ea571b714e4fe28d3e4e2db37abfd03c787a58074ea21ee9a8fd1740"}, + {file = "mmh3-4.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a7b1edf24c69e3513f879722b97ca85e52f9032f24a52284746877f6a7304086"}, + {file = "mmh3-4.1.0-cp312-cp312-win32.whl", hash = "sha256:411da64b951f635e1e2284b71d81a5a83580cea24994b328f8910d40bed67276"}, + {file = "mmh3-4.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:bebc3ecb6ba18292e3d40c8712482b4477abd6981c2ebf0e60869bd90f8ac3a9"}, + {file = "mmh3-4.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:168473dd608ade6a8d2ba069600b35199a9af837d96177d3088ca91f2b3798e3"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:372f4b7e1dcde175507640679a2a8790185bb71f3640fc28a4690f73da986a3b"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:438584b97f6fe13e944faf590c90fc127682b57ae969f73334040d9fa1c7ffa5"}, + {file = "mmh3-4.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6e27931b232fc676675fac8641c6ec6b596daa64d82170e8597f5a5b8bdcd3b6"}, + {file = 
"mmh3-4.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:571a92bad859d7b0330e47cfd1850b76c39b615a8d8e7aa5853c1f971fd0c4b1"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a69d6afe3190fa08f9e3a58e5145549f71f1f3fff27bd0800313426929c7068"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afb127be0be946b7630220908dbea0cee0d9d3c583fa9114a07156f98566dc28"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:940d86522f36348ef1a494cbf7248ab3f4a1638b84b59e6c9e90408bd11ad729"}, + {file = "mmh3-4.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dcccc4935686619a8e3d1f7b6e97e3bd89a4a796247930ee97d35ea1a39341"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01bb9b90d61854dfc2407c5e5192bfb47222d74f29d140cb2dd2a69f2353f7cc"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bcb1b8b951a2c0b0fb8a5426c62a22557e2ffc52539e0a7cc46eb667b5d606a9"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6477a05d5e5ab3168e82e8b106e316210ac954134f46ec529356607900aea82a"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:da5892287e5bea6977364b15712a2573c16d134bc5fdcdd4cf460006cf849278"}, + {file = "mmh3-4.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:99180d7fd2327a6fffbaff270f760576839dc6ee66d045fa3a450f3490fda7f5"}, + {file = "mmh3-4.1.0-cp38-cp38-win32.whl", hash = "sha256:9b0d4f3949913a9f9a8fb1bb4cc6ecd52879730aab5ff8c5a3d8f5b593594b73"}, + {file = "mmh3-4.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:598c352da1d945108aee0c3c3cfdd0e9b3edef74108f53b49d481d3990402169"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:475d6d1445dd080f18f0f766277e1237fa2914e5fe3307a3b2a3044f30892103"}, + 
{file = "mmh3-4.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5ca07c41e6a2880991431ac717c2a049056fff497651a76e26fc22224e8b5732"}, + {file = "mmh3-4.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ebe052fef4bbe30c0548d12ee46d09f1b69035ca5208a7075e55adfe091be44"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaefd42e85afb70f2b855a011f7b4d8a3c7e19c3f2681fa13118e4d8627378c5"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0ae43caae5a47afe1b63a1ae3f0986dde54b5fb2d6c29786adbfb8edc9edfb"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6218666f74c8c013c221e7f5f8a693ac9cf68e5ac9a03f2373b32d77c48904de"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac59294a536ba447b5037f62d8367d7d93b696f80671c2c45645fa9f1109413c"}, + {file = "mmh3-4.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:086844830fcd1e5c84fec7017ea1ee8491487cfc877847d96f86f68881569d2e"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e42b38fad664f56f77f6fbca22d08450f2464baa68acdbf24841bf900eb98e87"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d08b790a63a9a1cde3b5d7d733ed97d4eb884bfbc92f075a091652d6bfd7709a"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73ea4cc55e8aea28c86799ecacebca09e5f86500414870a8abaedfcbaf74d288"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f90938ff137130e47bcec8dc1f4ceb02f10178c766e2ef58a9f657ff1f62d124"}, + {file = "mmh3-4.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:aa1f13e94b8631c8cd53259250556edcf1de71738936b60febba95750d9632bd"}, + {file = "mmh3-4.1.0-cp39-cp39-win32.whl", hash = 
"sha256:a3b680b471c181490cf82da2142029edb4298e1bdfcb67c76922dedef789868d"}, + {file = "mmh3-4.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:fefef92e9c544a8dbc08f77a8d1b6d48006a750c4375bbcd5ff8199d761e263b"}, + {file = "mmh3-4.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:8e2c1f6a2b41723a4f82bd5a762a777836d29d664fc0095f17910bea0adfd4a6"}, + {file = "mmh3-4.1.0.tar.gz", hash = "sha256:a1cf25348b9acd229dda464a094d6170f47d2850a1fcb762a3b6172d2ce6ca4a"}, +] + +[package.extras] +test = ["mypy (>=1.0)", "pytest (>=7.0.0)"] + +[[package]] +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" +optional = false +python-versions = "*" +files = [ + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + +[[package]] +name = "msgpack" +version = "1.0.8" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +files = [ + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, 
+ {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = 
"msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, +] + +[[package]] +name = "multidict" +version = "6.0.5" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = 
"multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = 
"multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + 
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = 
"multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = 
"sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = 
"multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, +] + +[[package]] +name = "multiprocess" +version = "0.70.16" +description = "better multiprocessing and multithreading in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "multiprocess-0.70.16-pp310-pypy310_pp73-macosx_10_13_x86_64.whl", hash = "sha256:476887be10e2f59ff183c006af746cb6f1fd0eadcfd4ef49e605cbe2659920ee"}, + {file = "multiprocess-0.70.16-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d951bed82c8f73929ac82c61f01a7b5ce8f3e5ef40f5b52553b4f547ce2b08ec"}, + {file = "multiprocess-0.70.16-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37b55f71c07e2d741374998c043b9520b626a8dddc8b3129222ca4f1a06ef67a"}, + {file = "multiprocess-0.70.16-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba8c31889abf4511c7308a8c52bb4a30b9d590e7f58523302ba00237702ca054"}, + {file = "multiprocess-0.70.16-pp39-pypy39_pp73-macosx_10_13_x86_64.whl", hash = "sha256:0dfd078c306e08d46d7a8d06fb120313d87aa43af60d66da43ffff40b44d2f41"}, + {file = "multiprocess-0.70.16-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e7b9d0f307cd9bd50851afaac0dba2cb6c44449efff697df7c7645f7d3f2be3a"}, + {file = "multiprocess-0.70.16-py310-none-any.whl", 
hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02"}, + {file = "multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a"}, + {file = "multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e"}, + {file = "multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435"}, + {file = "multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3"}, + {file = "multiprocess-0.70.16.tar.gz", hash = "sha256:161af703d4652a0e1410be6abccecde4a7ddffd19341be0a7011b94aeb171ac1"}, +] + +[package.dependencies] +dill = ">=0.3.8" + +[[package]] +name = "mypy" +version = "1.9.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, + {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, + {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, + {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, + {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, + {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, + {file = 
"mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, + {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, + {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, + {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, + {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, + {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, + {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, + {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, + {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, + {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, + {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, + {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, + {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, + {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, + {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, + {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, + {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = 
"numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", 
hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = 
"numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "onnxruntime" +version = "1.17.1" +description = "ONNX Runtime is a runtime accelerator for Machine Learning models" +optional = false +python-versions = "*" +files = [ + {file = "onnxruntime-1.17.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d43ac17ac4fa3c9096ad3c0e5255bb41fd134560212dc124e7f52c3159af5d21"}, + {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55b5e92a4c76a23981c998078b9bf6145e4fb0b016321a8274b1607bd3c6bd35"}, + {file = "onnxruntime-1.17.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ebbcd2bc3a066cf54e6f18c75708eb4d309ef42be54606d22e5bdd78afc5b0d7"}, + {file = "onnxruntime-1.17.1-cp310-cp310-win32.whl", hash = "sha256:5e3716b5eec9092e29a8d17aab55e737480487deabfca7eac3cd3ed952b6ada9"}, + {file = "onnxruntime-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:fbb98cced6782ae1bb799cc74ddcbbeeae8819f3ad1d942a74d88e72b6511337"}, + {file = "onnxruntime-1.17.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:36fd6f87a1ecad87e9c652e42407a50fb305374f9a31d71293eb231caae18784"}, + {file 
= "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99a8bddeb538edabc524d468edb60ad4722cff8a49d66f4e280c39eace70500b"}, + {file = "onnxruntime-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd7fddb4311deb5a7d3390cd8e9b3912d4d963efbe4dfe075edbaf18d01c024e"}, + {file = "onnxruntime-1.17.1-cp311-cp311-win32.whl", hash = "sha256:606a7cbfb6680202b0e4f1890881041ffc3ac6e41760a25763bd9fe146f0b335"}, + {file = "onnxruntime-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:53e4e06c0a541696ebdf96085fd9390304b7b04b748a19e02cf3b35c869a1e76"}, + {file = "onnxruntime-1.17.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:40f08e378e0f85929712a2b2c9b9a9cc400a90c8a8ca741d1d92c00abec60843"}, + {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac79da6d3e1bb4590f1dad4bb3c2979d7228555f92bb39820889af8b8e6bd472"}, + {file = "onnxruntime-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ae9ba47dc099004e3781f2d0814ad710a13c868c739ab086fc697524061695ea"}, + {file = "onnxruntime-1.17.1-cp312-cp312-win32.whl", hash = "sha256:2dff1a24354220ac30e4a4ce2fb1df38cb1ea59f7dac2c116238d63fe7f4c5ff"}, + {file = "onnxruntime-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:6226a5201ab8cafb15e12e72ff2a4fc8f50654e8fa5737c6f0bd57c5ff66827e"}, + {file = "onnxruntime-1.17.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:cd0c07c0d1dfb8629e820b05fda5739e4835b3b82faf43753d2998edf2cf00aa"}, + {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:617ebdf49184efa1ba6e4467e602fbfa029ed52c92f13ce3c9f417d303006381"}, + {file = "onnxruntime-1.17.1-cp38-cp38-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9dae9071e3facdf2920769dceee03b71c684b6439021defa45b830d05e148924"}, + {file = "onnxruntime-1.17.1-cp38-cp38-win32.whl", hash = 
"sha256:835d38fa1064841679433b1aa8138b5e1218ddf0cfa7a3ae0d056d8fd9cec713"}, + {file = "onnxruntime-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:96621e0c555c2453bf607606d08af3f70fbf6f315230c28ddea91754e17ad4e6"}, + {file = "onnxruntime-1.17.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7a9539935fb2d78ebf2cf2693cad02d9930b0fb23cdd5cf37a7df813e977674d"}, + {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45c6a384e9d9a29c78afff62032a46a993c477b280247a7e335df09372aedbe9"}, + {file = "onnxruntime-1.17.1-cp39-cp39-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4e19f966450f16863a1d6182a685ca33ae04d7772a76132303852d05b95411ea"}, + {file = "onnxruntime-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e2ae712d64a42aac29ed7a40a426cb1e624a08cfe9273dcfe681614aa65b07dc"}, + {file = "onnxruntime-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7e9f7fb049825cdddf4a923cfc7c649d84d63c0134315f8e0aa9e0c3004672c"}, +] + +[package.dependencies] +coloredlogs = "*" +flatbuffers = "*" +numpy = ">=1.21.6" +packaging = "*" +protobuf = "*" +sympy = "*" + +[[package]] +name = "openai" +version = "1.16.1" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.16.1-py3-none-any.whl", hash = "sha256:77ef3db6110071f7154859e234250fb945a36554207a30a4491092eadb73fcb5"}, + {file = "openai-1.16.1.tar.gz", hash = "sha256:58922c785d167458b46e3c76e7b1bc2306f313ee9b71791e84cbf590abe160f2"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.7,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + +[[package]] +name = "opentelemetry-api" +version = "1.24.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"opentelemetry_api-1.24.0-py3-none-any.whl", hash = "sha256:0f2c363d98d10d1ce93330015ca7fd3a65f60be64e05e30f557c61de52c80ca2"}, + {file = "opentelemetry_api-1.24.0.tar.gz", hash = "sha256:42719f10ce7b5a9a73b10a4baf620574fb8ad495a9cbe5c18d76b75d8689c67e"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<=7.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.24.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.24.0-py3-none-any.whl", hash = "sha256:e51f2c9735054d598ad2df5d3eca830fecfb5b0bda0a2fa742c9c7718e12f641"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.24.0.tar.gz", hash = "sha256:5d31fa1ff976cacc38be1ec4e3279a3f88435c75b38b1f7a099a1faffc302461"}, +] + +[package.dependencies] +opentelemetry-proto = "1.24.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.24.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0-py3-none-any.whl", hash = "sha256:f40d62aa30a0a43cc1657428e59fcf82ad5f7ea8fff75de0f9d9cb6f739e0a3b"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.24.0.tar.gz", hash = "sha256:217c6e30634f2c9797999ea9da29f7300479a94a610139b9df17433f915e7baa"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.24.0" +opentelemetry-proto = "1.24.0" +opentelemetry-sdk = ">=1.24.0,<1.25.0" + +[package.extras] +test = ["pytest-grpc"] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.45b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"opentelemetry_instrumentation-0.45b0-py3-none-any.whl", hash = "sha256:06c02e2c952c1b076e8eaedf1b82f715e2937ba7eeacab55913dd434fbcec258"}, + {file = "opentelemetry_instrumentation-0.45b0.tar.gz", hash = "sha256:6c47120a7970bbeb458e6a73686ee9ba84b106329a79e4a4a66761f933709c7e"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.45b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_asgi-0.45b0-py3-none-any.whl", hash = "sha256:8be1157ed62f0db24e45fdf7933c530c4338bd025c5d4af7830e903c0756021b"}, + {file = "opentelemetry_instrumentation_asgi-0.45b0.tar.gz", hash = "sha256:97f55620f163fd3d20323e9fd8dc3aacc826c03397213ff36b877e0f4b6b08a6"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.45b0" +opentelemetry-semantic-conventions = "0.45b0" +opentelemetry-util-http = "0.45b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.45b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.45b0-py3-none-any.whl", hash = "sha256:77d9c123a363129148f5f66d44094f3d67aaaa2b201396d94782b4a7f9ce4314"}, + {file = "opentelemetry_instrumentation_fastapi-0.45b0.tar.gz", hash = "sha256:5a6b91e1c08a01601845fcfcfdefd0a2aecdb3c356d4a436a3210cb58c21487e"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.45b0" +opentelemetry-instrumentation-asgi = "0.45b0" +opentelemetry-semantic-conventions = "0.45b0" +opentelemetry-util-http = "0.45b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)"] + +[[package]] +name = "opentelemetry-proto" 
+version = "1.24.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_proto-1.24.0-py3-none-any.whl", hash = "sha256:bcb80e1e78a003040db71ccf83f2ad2019273d1e0828089d183b18a1476527ce"}, + {file = "opentelemetry_proto-1.24.0.tar.gz", hash = "sha256:ff551b8ad63c6cabb1845ce217a6709358dfaba0f75ea1fa21a61ceddc78cab8"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.24.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_sdk-1.24.0-py3-none-any.whl", hash = "sha256:fa731e24efe832e98bcd90902085b359dcfef7d9c9c00eb5b9a18587dae3eb59"}, + {file = "opentelemetry_sdk-1.24.0.tar.gz", hash = "sha256:75bc0563affffa827700e0f4f4a68e1e257db0df13372344aebc6f8a64cde2e5"}, +] + +[package.dependencies] +opentelemetry-api = "1.24.0" +opentelemetry-semantic-conventions = "0.45b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.45b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_semantic_conventions-0.45b0-py3-none-any.whl", hash = "sha256:a4a6fb9a7bacd9167c082aa4681009e9acdbfa28ffb2387af50c2fef3d30c864"}, + {file = "opentelemetry_semantic_conventions-0.45b0.tar.gz", hash = "sha256:7c84215a44ac846bc4b8e32d5e78935c5c43482e491812a0bb8aaf87e4d92118"}, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.45b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "opentelemetry_util_http-0.45b0-py3-none-any.whl", hash = "sha256:6628868b501b3004e1860f976f410eeb3d3499e009719d818000f24ce17b6e33"}, + {file = "opentelemetry_util_http-0.45b0.tar.gz", hash = "sha256:4ce08b6a7d52dd7c96b7705b5b4f06fdb6aa3eac1233b3b0bfef8a0cab9a92cd"}, +] + +[[package]] +name = "orjson" +version = 
"3.9.15" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"}, + {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"}, + {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"}, + {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"}, + {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"}, + {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"}, + {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"}, + {file = 
"orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"}, + {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"}, + {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"}, + {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"}, + {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"}, + {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"}, + {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"}, + {file = 
"orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"}, + {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"}, + {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"}, + {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"}, + {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"}, + {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"}, + {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"}, + {file = 
"orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"}, + {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"}, + {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"}, + {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"}, + {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"}, + {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"}, + {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"}, + {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"}, + {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"}, + {file = 
"orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"}, + {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"}, +] + +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "2.2.0" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", 
"adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", 
"pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "pandas-stubs" +version = "2.2.1.240316" +description = "Type annotations for pandas" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas_stubs-2.2.1.240316-py3-none-any.whl", hash = "sha256:0126a26451a37cb893ea62357ca87ba3d181bd999ec8ba2ca5602e20207d6682"}, + {file = "pandas_stubs-2.2.1.240316.tar.gz", hash = "sha256:236a4f812fb6b1922e9607ff09e427f6d8540c421c9e5a40e3e4ddf7adac7f05"}, +] + +[package.dependencies] +numpy = {version = ">=1.26.0", markers = "python_version < \"3.13\""} +types-pytz = ">=2022.1.1" + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "passlib" +version = "1.7.4" +description = "comprehensive password hashing framework supporting over 30 schemes" +optional = false +python-versions = "*" +files = [ + {file = "passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1"}, + {file = "passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04"}, +] + +[package.extras] +argon2 = ["argon2-cffi (>=18.2.0)"] +bcrypt = ["bcrypt (>=3.1.0)"] +build-docs = ["cloud-sptheme (>=1.10.1)", "sphinx (>=1.6)", "sphinxcontrib-fulltoc (>=1.2.0)"] +totp = ["cryptography"] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pillow" +version = "10.3.0" +description = "Python Imaging Library (Fork)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, + {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, + {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, + {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, + {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, + {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, + 
{file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, + {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, + {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, + {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, + {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, + {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, + {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, + {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, + {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, + {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, + {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, + {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, + {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, + {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, + {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, + {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, + {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, + {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, + {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, + {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, + {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, + {file = 
"pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, + {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, + {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, + {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, + {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, + {file = 
"pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, + {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, + {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "posthog" +version = "3.5.0" +description = "Integrate PostHog into any python application." 
+optional = false +python-versions = "*" +files = [ + {file = "posthog-3.5.0-py2.py3-none-any.whl", hash = "sha256:3c672be7ba6f95d555ea207d4486c171d06657eb34b3ce25eb043bfe7b6b5b76"}, + {file = "posthog-3.5.0.tar.gz", hash = "sha256:8f7e3b2c6e8714d0c0c542a2109b83a7549f63b7113a133ab2763a89245ef2ef"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"] +sentry = ["django", "sentry-sdk"] +test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-timeout"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.43" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = 
"sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = 
"psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pulsar-client" 
+version = "3.4.0" +description = "Apache Pulsar Python client library" +optional = false +python-versions = "*" +files = [ + {file = "pulsar_client-3.4.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ebf99db5244ff69479283b25621b070492acc4bb643d162d86b90387cb6fdb2a"}, + {file = "pulsar_client-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6cb5d8e1482a8aea758633be23717e0c4bb7dc53784e37915c0048c0382f134"}, + {file = "pulsar_client-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b30a7592e42c76034e9a8d64d42dd5bab361425f869de562e9ccad698e19cd88"}, + {file = "pulsar_client-3.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5963090a78a5644ba25f41da3a6d49ea3f00c972b095baff365916dc246426a"}, + {file = "pulsar_client-3.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:419cdcf577f755e3f31bf264300d9ba158325edb2ee9cee555d81ba1909c094e"}, + {file = "pulsar_client-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:4c93c35ee97307dae153e748b33dcd3d4f06da34bca373321aa2df73f1535705"}, + {file = "pulsar_client-3.4.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:11952fb022ee72debf53b169f4482f9dc5c890be0149ae98779864b3a21f1bd3"}, + {file = "pulsar_client-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8743c320aa96798d20cafa98ea97a68c4295fc4872c23acd5e012fd36cb06ba"}, + {file = "pulsar_client-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33571de99cd898349f17978ba62e2b839ea0275fb7067f31bf5f6ebfeae0987d"}, + {file = "pulsar_client-3.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a60c03c3e70f018538e7cd3fa84d95e283b610272b744166dbc48960a809fa07"}, + {file = "pulsar_client-3.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c47041267b5843ffec54352d842156c279945f3e976d7025ffa89875ff76390"}, + {file = "pulsar_client-3.4.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:49fe4ab04004b476c87ab3ad22fe87346fca564a3e3ca9c0ac58fee45a895d81"}, + {file = "pulsar_client-3.4.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:1e077a4839be3ead3de3f05b4c244269dca2df07f47cea0b90544c7e9dc1642f"}, + {file = "pulsar_client-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f202b84e1f683d64672dd1971114600ae2e5c3735587286ff9bfb431385f08e8"}, + {file = "pulsar_client-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c606c04f357341042fa6c75477de7d2204f7ae50aa29c2f74b24e54c85f47f96"}, + {file = "pulsar_client-3.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c67b25ede3a578f5a7dc30230e52609ef38191f74b47e5cbdbc98c42df556927"}, + {file = "pulsar_client-3.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b7f8211cc9460cdf4d06e4e1cb878689d2aa4a7e4027bd2a2f1419a79ade16a6"}, + {file = "pulsar_client-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:c5399e9780d6951c69808c0b6175311a966af82fb08addf6e741ae37b1bee7ef"}, + {file = "pulsar_client-3.4.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:a2d6c850b60106dc915d3476a490fba547c6748a5f742b68abd30d1a35355b82"}, + {file = "pulsar_client-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a52ea8294a9f30eb6f0a2db5dc16e3aad7ff2284f818c48ad3a6b601723be02b"}, + {file = "pulsar_client-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1eeeede40108be12222e009285c971e5b8f6433d9f0f8ef934d6a131585921c4"}, + {file = "pulsar_client-3.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9409066c600f2b6f220552c5dfe08aeeabcf07fe0e76367aa5816b2e87a5cf72"}, + {file = "pulsar_client-3.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:58e2f886e6dab43e66c3ce990fe96209e55ab46350506829a637b77b74125fb9"}, + {file = "pulsar_client-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:b57dfa5063b0d9dc7664896c55605eac90753e35e80db5a959d3be2be0ab0d48"}, + {file = 
"pulsar_client-3.4.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:7704c664aa2c801af4c2d3a58e9d8ffaeef12ce8a0f71712e9187f9a96da856f"}, + {file = "pulsar_client-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0364db563e27442053bdbb8655e7ffb420f491690bc2c78da5a58bd35c658ad"}, + {file = "pulsar_client-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3e34de19e0744d8aa3538cb2172076bccd0761b3e94ebadb7bd59765ae3d1ed"}, + {file = "pulsar_client-3.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:dc8be41dec8cb052fb1837550f495e9b73a8b3cf85e07157904ec84832758a65"}, + {file = "pulsar_client-3.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b49d669bed15b7edb9c936704310d57808f1d01c511b94d866f54fe8ffe1752d"}, + {file = "pulsar_client-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:88c93e5fbfc349f3967e931f7a908d15fd4fd725ebdd842423ac9cd961fe293f"}, +] + +[package.dependencies] +certifi = "*" + +[package.extras] +all = ["apache-bookkeeper-client (>=4.16.1)", "fastavro (>=1.9.2)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] +avro = ["fastavro (>=1.9.2)"] +functions = ["apache-bookkeeper-client (>=4.16.1)", "grpcio (>=1.60.0)", "prometheus-client", "protobuf (>=3.6.1,<=3.20.3)", "ratelimit"] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.6.4" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.3" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + 
{file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = 
"pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = 
"sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + 
{file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = 
"pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-settings" +version = "2.2.1" +description = "Settings management using Pydantic" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_settings-2.2.1-py3-none-any.whl", hash = "sha256:0235391d26db4d2190cb9b31051c4b46882d28a51533f97440867f012d4da091"}, + {file = "pydantic_settings-2.2.1.tar.gz", hash = "sha256:00b9f6a5e95553590434c0fa01ead0b216c3e10bc54ae02e37f359948643c5ed"}, +] + +[package.dependencies] +pydantic = ">=2.3.0" +python-dotenv = ">=0.21.0" + +[package.extras] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pypdf" +version = "4.1.0" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pypdf-4.1.0-py3-none-any.whl", hash = "sha256:16cac912a05200099cef3f347c4c7e0aaf0a6d027603b8f9a973c0ea500dff89"}, + {file = "pypdf-4.1.0.tar.gz", hash = "sha256:01c3257ec908676efd60a4537e525b89d48e0852bc92b4e0aa4cc646feda17cc"}, +] + +[package.extras] +crypto = ["PyCryptodome", "cryptography"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] +image = ["Pillow (>=8.0.0)"] + +[[package]] +name = "pypika" +version = "0.48.9" +description = "A SQL query builder API for Python" +optional = false +python-versions = "*" +files = [ + {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, +] + +[[package]] +name = "pyproject-hooks" +version = "1.0.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"}, + {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"}, +] + +[package.dependencies] +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "pyreadline3" +version = "3.4.1" +description = "A python implementation of GNU readline." +optional = false +python-versions = "*" +files = [ + {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, + {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, +] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = 
"sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-sugar" +version = "0.9.7" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-sugar-0.9.7.tar.gz", hash = "sha256:f1e74c1abfa55f7241cf7088032b6e378566f16b938f3f08905e2cf4494edd46"}, + {file = "pytest_sugar-0.9.7-py2.py3-none-any.whl", hash = "sha256:8cb5a4e5f8bbcd834622b0235db9e50432f4cbd71fef55b467fe44e43701e062"}, +] + +[package.dependencies] +packaging = ">=21.3" +pytest = ">=6.2.0" +termcolor = ">=2.1.0" + +[package.extras] +dev = ["black", "flake8", "pre-commit"] + +[[package]] +name = "pytest-xdist" +version = "3.5.0" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-xdist-3.5.0.tar.gz", hash = "sha256:cbb36f3d67e0c478baa57fa4edc8843887e0f6cfc42d677530a36d7472b32d8a"}, + {file = "pytest_xdist-3.5.0-py3-none-any.whl", hash = "sha256:d075629c7e00b611df89f490a5063944bee7a4362a5ff11c7cc7824a03dfce24"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-docx" +version = "1.1.0" +description = "Create, read, and update Microsoft Word .docx files." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "python-docx-1.1.0.tar.gz", hash = "sha256:5829b722141cf1ab79aedf0c34d9fe9924b29764584c0f2164eb2b02dcdf17c9"}, + {file = "python_docx-1.1.0-py3-none-any.whl", hash = "sha256:bac9773278098a1ddc43a52d84e22f5909c4a3080a624530b3ecb3771b07c6cd"}, +] + +[package.dependencies] +lxml = ">=3.1.0" +typing-extensions = "*" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-engineio" +version = "4.9.0" +description = "Engine.IO server and client for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-engineio-4.9.0.tar.gz", hash = "sha256:e87459c15638e567711fd156e6f9c4a402668871bed79523f0ecfec744729ec7"}, + {file = "python_engineio-4.9.0-py3-none-any.whl", hash = "sha256:979859bff770725b75e60353d7ae53b397e8b517d05ba76733b404a3dcca3e4c"}, +] + +[package.dependencies] +simple-websocket = ">=0.10.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +optional = false +python-versions = "*" +files = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] + +[package.dependencies] +ecdsa = "!=0.15" +pyasn1 = "*" 
+rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.7" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python_multipart-0.0.7-py3-none-any.whl", hash = "sha256:b1fef9a53b74c795e2347daac8c54b252d9e0df9c619712691c1cc8021bd3c49"}, + {file = "python_multipart-0.0.7.tar.gz", hash = "sha256:288a6c39b06596c1b988bb6794c6fbc80e6c369e35e5062637df256bee0c9af9"}, +] + +[package.extras] +dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==2.2.0)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] + +[[package]] +name = "python-socketio" +version = "5.11.2" +description = "Socket.IO server and client for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-socketio-5.11.2.tar.gz", hash = "sha256:ae6a1de5c5209ca859dc574dccc8931c4be17ee003e74ce3b8d1306162bb4a37"}, + {file = "python_socketio-5.11.2-py3-none-any.whl", hash = "sha256:b9f22a8ff762d7a6e123d16a43ddb1a27d50f07c3c88ea999334f2f89b0ad52b"}, +] + +[package.dependencies] +bidict = ">=0.21.0" +python-engineio = ">=4.8.0" + +[package.extras] +asyncio-client = ["aiohttp (>=3.4)"] +client = ["requests (>=2.21.0)", "websocket-client (>=0.54.0)"] +docs = ["sphinx"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + 
+[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + 
+[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = 
"PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyzmq" +version = "25.1.2" +description = "Python bindings for 0MQ" +optional = false +python-versions = 
">=3.6" +files = [ + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, + {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9a5f194cf730f2b24d6af1f833c14c10f41023da46a7f736f48b6d35061e76e"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faf79a302f834d9e8304fafdc11d0d042266667ac45209afa57e5efc998e3872"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f51a7b4ead28d3fca8dda53216314a553b0f7a91ee8fc46a72b402a78c3e43d"}, + {file = "pyzmq-25.1.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0ddd6d71d4ef17ba5a87becf7ddf01b371eaba553c603477679ae817a8d84d75"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:246747b88917e4867e2367b005fc8eefbb4a54b7db363d6c92f89d69abfff4b6"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:00c48ae2fd81e2a50c3485de1b9d5c7c57cd85dc8ec55683eac16846e57ac979"}, + {file = "pyzmq-25.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a68d491fc20762b630e5db2191dd07ff89834086740f70e978bb2ef2668be08"}, + {file = "pyzmq-25.1.2-cp310-cp310-win32.whl", hash = "sha256:09dfe949e83087da88c4a76767df04b22304a682d6154de2c572625c62ad6886"}, + {file = "pyzmq-25.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:fa99973d2ed20417744fca0073390ad65ce225b546febb0580358e36aa90dba6"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:82544e0e2d0c1811482d37eef297020a040c32e0687c1f6fc23a75b75db8062c"}, + {file = "pyzmq-25.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:01171fc48542348cd1a360a4b6c3e7d8f46cdcf53a8d40f84db6707a6768acc1"}, + {file = 
"pyzmq-25.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc69c96735ab501419c432110016329bf0dea8898ce16fab97c6d9106dc0b348"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e124e6b1dd3dfbeb695435dff0e383256655bb18082e094a8dd1f6293114642"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7598d2ba821caa37a0f9d54c25164a4fa351ce019d64d0b44b45540950458840"}, + {file = "pyzmq-25.1.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d1299d7e964c13607efd148ca1f07dcbf27c3ab9e125d1d0ae1d580a1682399d"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4e6f689880d5ad87918430957297c975203a082d9a036cc426648fcbedae769b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cc69949484171cc961e6ecd4a8911b9ce7a0d1f738fcae717177c231bf77437b"}, + {file = "pyzmq-25.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9880078f683466b7f567b8624bfc16cad65077be046b6e8abb53bed4eeb82dd3"}, + {file = "pyzmq-25.1.2-cp311-cp311-win32.whl", hash = "sha256:4e5837af3e5aaa99a091302df5ee001149baff06ad22b722d34e30df5f0d9097"}, + {file = "pyzmq-25.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:25c2dbb97d38b5ac9fd15586e048ec5eb1e38f3d47fe7d92167b0c77bb3584e9"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:11e70516688190e9c2db14fcf93c04192b02d457b582a1f6190b154691b4c93a"}, + {file = "pyzmq-25.1.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:313c3794d650d1fccaaab2df942af9f2c01d6217c846177cfcbc693c7410839e"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b3cbba2f47062b85fe0ef9de5b987612140a9ba3a9c6d2543c6dec9f7c2ab27"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc31baa0c32a2ca660784d5af3b9487e13b61b3032cb01a115fce6588e1bed30"}, + {file = 
"pyzmq-25.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c9087b109070c5ab0b383079fa1b5f797f8d43e9a66c07a4b8b8bdecfd88ee"}, + {file = "pyzmq-25.1.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:f8429b17cbb746c3e043cb986328da023657e79d5ed258b711c06a70c2ea7537"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5074adeacede5f810b7ef39607ee59d94e948b4fd954495bdb072f8c54558181"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7ae8f354b895cbd85212da245f1a5ad8159e7840e37d78b476bb4f4c3f32a9fe"}, + {file = "pyzmq-25.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b264bf2cc96b5bc43ce0e852be995e400376bd87ceb363822e2cb1964fcdc737"}, + {file = "pyzmq-25.1.2-cp312-cp312-win32.whl", hash = "sha256:02bbc1a87b76e04fd780b45e7f695471ae6de747769e540da909173d50ff8e2d"}, + {file = "pyzmq-25.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:ced111c2e81506abd1dc142e6cd7b68dd53747b3b7ae5edbea4578c5eeff96b7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:7b6d09a8962a91151f0976008eb7b29b433a560fde056ec7a3db9ec8f1075438"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967668420f36878a3c9ecb5ab33c9d0ff8d054f9c0233d995a6d25b0e95e1b6b"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5edac3f57c7ddaacdb4d40f6ef2f9e299471fc38d112f4bc6d60ab9365445fb0"}, + {file = "pyzmq-25.1.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dabfb10ef897f3b7e101cacba1437bd3a5032ee667b7ead32bbcdd1a8422fe7"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:2c6441e0398c2baacfe5ba30c937d274cfc2dc5b55e82e3749e333aabffde561"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:16b726c1f6c2e7625706549f9dbe9b06004dfbec30dbed4bf50cbdfc73e5b32a"}, + {file = "pyzmq-25.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:a86c2dd76ef71a773e70551a07318b8e52379f58dafa7ae1e0a4be78efd1ff16"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win32.whl", hash = "sha256:359f7f74b5d3c65dae137f33eb2bcfa7ad9ebefd1cab85c935f063f1dbb245cc"}, + {file = "pyzmq-25.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:55875492f820d0eb3417b51d96fea549cde77893ae3790fd25491c5754ea2f68"}, + {file = "pyzmq-25.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b8c8a419dfb02e91b453615c69568442e897aaf77561ee0064d789705ff37a92"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8807c87fa893527ae8a524c15fc505d9950d5e856f03dae5921b5e9aa3b8783b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5e319ed7d6b8f5fad9b76daa0a68497bc6f129858ad956331a5835785761e003"}, + {file = "pyzmq-25.1.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3c53687dde4d9d473c587ae80cc328e5b102b517447456184b485587ebd18b62"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9add2e5b33d2cd765ad96d5eb734a5e795a0755f7fc49aa04f76d7ddda73fd70"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e690145a8c0c273c28d3b89d6fb32c45e0d9605b2293c10e650265bf5c11cfec"}, + {file = "pyzmq-25.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a06faa7165634f0cac1abb27e54d7a0b3b44eb9994530b8ec73cf52e15353b"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win32.whl", hash = "sha256:0f97bc2f1f13cb16905a5f3e1fbdf100e712d841482b2237484360f8bc4cb3d7"}, + {file = "pyzmq-25.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6cc0020b74b2e410287e5942e1e10886ff81ac77789eb20bec13f7ae681f0fdd"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:bef02cfcbded83473bdd86dd8d3729cd82b2e569b75844fb4ea08fee3c26ae41"}, + {file = "pyzmq-25.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e10a4b5a4b1192d74853cc71a5e9fd022594573926c2a3a4802020360aa719d8"}, + {file = 
"pyzmq-25.1.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8c5f80e578427d4695adac6fdf4370c14a2feafdc8cb35549c219b90652536ae"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5dde6751e857910c1339890f3524de74007958557593b9e7e8c5f01cd919f8a7"}, + {file = "pyzmq-25.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea1608dd169da230a0ad602d5b1ebd39807ac96cae1845c3ceed39af08a5c6df"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0f513130c4c361201da9bc69df25a086487250e16b5571ead521b31ff6b02220"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:019744b99da30330798bb37df33549d59d380c78e516e3bab9c9b84f87a9592f"}, + {file = "pyzmq-25.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e2713ef44be5d52dd8b8e2023d706bf66cb22072e97fc71b168e01d25192755"}, + {file = "pyzmq-25.1.2-cp38-cp38-win32.whl", hash = "sha256:07cd61a20a535524906595e09344505a9bd46f1da7a07e504b315d41cd42eb07"}, + {file = "pyzmq-25.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb7e49a17fb8c77d3119d41a4523e432eb0c6932187c37deb6fbb00cc3028088"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:94504ff66f278ab4b7e03e4cba7e7e400cb73bfa9d3d71f58d8972a8dc67e7a6"}, + {file = "pyzmq-25.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd0d50bbf9dca1d0bdea219ae6b40f713a3fb477c06ca3714f208fd69e16fd8"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:004ff469d21e86f0ef0369717351073e0e577428e514c47c8480770d5e24a565"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c0b5ca88a8928147b7b1e2dfa09f3b6c256bc1135a1338536cbc9ea13d3b7add"}, + {file = "pyzmq-25.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c9a79f1d2495b167119d02be7448bfba57fad2a4207c4f68abc0bab4b92925b"}, + {file = 
"pyzmq-25.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:518efd91c3d8ac9f9b4f7dd0e2b7b8bf1a4fe82a308009016b07eaa48681af82"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1ec23bd7b3a893ae676d0e54ad47d18064e6c5ae1fadc2f195143fb27373f7f6"}, + {file = "pyzmq-25.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db36c27baed588a5a8346b971477b718fdc66cf5b80cbfbd914b4d6d355e44e2"}, + {file = "pyzmq-25.1.2-cp39-cp39-win32.whl", hash = "sha256:39b1067f13aba39d794a24761e385e2eddc26295826530a8c7b6c6c341584289"}, + {file = "pyzmq-25.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:8e9f3fabc445d0ce320ea2c59a75fe3ea591fdbdeebec5db6de530dd4b09412e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a8c1d566344aee826b74e472e16edae0a02e2a044f14f7c24e123002dcff1c05"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759cfd391a0996345ba94b6a5110fca9c557ad4166d86a6e81ea526c376a01e8"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c61e346ac34b74028ede1c6b4bcecf649d69b707b3ff9dc0fab453821b04d1e"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cb8fc1f8d69b411b8ec0b5f1ffbcaf14c1db95b6bccea21d83610987435f1a4"}, + {file = "pyzmq-25.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3c00c9b7d1ca8165c610437ca0c92e7b5607b2f9076f4eb4b095c85d6e680a1d"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:df0c7a16ebb94452d2909b9a7b3337940e9a87a824c4fc1c7c36bb4404cb0cde"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:45999e7f7ed5c390f2e87ece7f6c56bf979fb213550229e711e45ecc7d42ccb8"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ac170e9e048b40c605358667aca3d94e98f604a18c44bdb4c102e67070f3ac9b"}, + {file = 
"pyzmq-25.1.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1b604734bec94f05f81b360a272fc824334267426ae9905ff32dc2be433ab96"}, + {file = "pyzmq-25.1.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a793ac733e3d895d96f865f1806f160696422554e46d30105807fdc9841b9f7d"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0806175f2ae5ad4b835ecd87f5f85583316b69f17e97786f7443baaf54b9bb98"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ef12e259e7bc317c7597d4f6ef59b97b913e162d83b421dd0db3d6410f17a244"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea253b368eb41116011add00f8d5726762320b1bda892f744c91997b65754d73"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b9b1f2ad6498445a941d9a4fee096d387fee436e45cc660e72e768d3d8ee611"}, + {file = "pyzmq-25.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8b14c75979ce932c53b79976a395cb2a8cd3aaf14aef75e8c2cb55a330b9b49d"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:889370d5174a741a62566c003ee8ddba4b04c3f09a97b8000092b7ca83ec9c49"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18fff090441a40ffda8a7f4f18f03dc56ae73f148f1832e109f9bffa85df15"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99a6b36f95c98839ad98f8c553d8507644c880cf1e0a57fe5e3a3f3969040882"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4345c9a27f4310afbb9c01750e9461ff33d6fb74cd2456b107525bbeebcb5be3"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3516e0b6224cf6e43e341d56da15fd33bdc37fa0c06af4f029f7d7dfceceabbc"}, + {file = "pyzmq-25.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:146b9b1f29ead41255387fb07be56dc29639262c0f7344f570eecdcd8d683314"}, + {file = "pyzmq-25.1.2.tar.gz", hash = "sha256:93f1aa311e8bb912e34f004cf186407a4e90eec4f0ecc0efd26056bf7eda0226"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "regex" +version = "2023.12.25" +description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=3.7" +files = [ + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0694219a1d54336fd0445ea382d49d36882415c0134ee1e8332afd1529f0baa5"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b014333bd0217ad3d54c143de9d4b9a3ca1c5a29a6d0d554952ea071cff0f1f8"}, + {file = "regex-2023.12.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d865984b3f71f6d0af64d0d88f5733521698f6c16f445bb09ce746c92c97c586"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e0eabac536b4cc7f57a5f3d095bfa557860ab912f25965e08fe1545e2ed8b4c"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25a8ad70e716f96e13a637802813f65d8a6760ef48672aa3502f4c24ea8b400"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9b6d73353f777630626f403b0652055ebfe8ff142a44ec2cf18ae470395766e"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9cc99d6946d750eb75827cb53c4371b8b0fe89c733a94b1573c9dd16ea6c9e4"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88d1f7bef20c721359d8675f7d9f8e414ec5003d8f642fdfd8087777ff7f94b5"}, + {file = "regex-2023.12.25-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:cb3fe77aec8f1995611f966d0c656fdce398317f850d0e6e7aebdfe61f40e1cd"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7aa47c2e9ea33a4a2a05f40fcd3ea36d73853a2aae7b4feab6fc85f8bf2c9704"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df26481f0c7a3f8739fecb3e81bc9da3fcfae34d6c094563b9d4670b047312e1"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c40281f7d70baf6e0db0c2f7472b31609f5bc2748fe7275ea65a0b4601d9b392"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d94a1db462d5690ebf6ae86d11c5e420042b9898af5dcf278bd97d6bda065423"}, + {file = "regex-2023.12.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba1b30765a55acf15dce3f364e4928b80858fa8f979ad41f862358939bdd1f2f"}, + {file = "regex-2023.12.25-cp310-cp310-win32.whl", hash = "sha256:150c39f5b964e4d7dba46a7962a088fbc91f06e606f023ce57bb347a3b2d4630"}, + {file = "regex-2023.12.25-cp310-cp310-win_amd64.whl", hash = "sha256:09da66917262d9481c719599116c7dc0c321ffcec4b1f510c4f8a066f8768105"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1b9d811f72210fa9306aeb88385b8f8bcef0dfbf3873410413c00aa94c56c2b6"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d902a43085a308cef32c0d3aea962524b725403fd9373dea18110904003bac97"}, + {file = "regex-2023.12.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d166eafc19f4718df38887b2bbe1467a4f74a9830e8605089ea7a30dd4da8887"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7ad32824b7f02bb3c9f80306d405a1d9b7bb89362d68b3c5a9be53836caebdb"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:636ba0a77de609d6510235b7f0e77ec494d2657108f777e8765efc060094c98c"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:0fda75704357805eb953a3ee15a2b240694a9a514548cd49b3c5124b4e2ad01b"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f72cbae7f6b01591f90814250e636065850c5926751af02bb48da94dfced7baa"}, + {file = "regex-2023.12.25-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db2a0b1857f18b11e3b0e54ddfefc96af46b0896fb678c85f63fb8c37518b3e7"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7502534e55c7c36c0978c91ba6f61703faf7ce733715ca48f499d3dbbd7657e0"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e8c7e08bb566de4faaf11984af13f6bcf6a08f327b13631d41d62592681d24fe"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:283fc8eed679758de38fe493b7d7d84a198b558942b03f017b1f94dda8efae80"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f44dd4d68697559d007462b0a3a1d9acd61d97072b71f6d1968daef26bc744bd"}, + {file = "regex-2023.12.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:67d3ccfc590e5e7197750fcb3a2915b416a53e2de847a728cfa60141054123d4"}, + {file = "regex-2023.12.25-cp311-cp311-win32.whl", hash = "sha256:68191f80a9bad283432385961d9efe09d783bcd36ed35a60fb1ff3f1ec2efe87"}, + {file = "regex-2023.12.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d2af3f6b8419661a0c421584cfe8aaec1c0e435ce7e47ee2a97e344b98f794f"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8a0ccf52bb37d1a700375a6b395bff5dd15c50acb745f7db30415bae3c2b0715"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c3c4a78615b7762740531c27cf46e2f388d8d727d0c0c739e72048beb26c8a9d"}, + {file = "regex-2023.12.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ad83e7545b4ab69216cef4cc47e344d19622e28aabec61574b20257c65466d6a"}, + {file = 
"regex-2023.12.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7a635871143661feccce3979e1727c4e094f2bdfd3ec4b90dfd4f16f571a87a"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d498eea3f581fbe1b34b59c697512a8baef88212f92e4c7830fcc1499f5b45a5"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43f7cd5754d02a56ae4ebb91b33461dc67be8e3e0153f593c509e21d219c5060"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51f4b32f793812714fd5307222a7f77e739b9bc566dc94a18126aba3b92b98a3"}, + {file = "regex-2023.12.25-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba99d8077424501b9616b43a2d208095746fb1284fc5ba490139651f971d39d9"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4bfc2b16e3ba8850e0e262467275dd4d62f0d045e0e9eda2bc65078c0110a11f"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8c2c19dae8a3eb0ea45a8448356ed561be843b13cbc34b840922ddf565498c1c"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:60080bb3d8617d96f0fb7e19796384cc2467447ef1c491694850ebd3670bc457"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b77e27b79448e34c2c51c09836033056a0547aa360c45eeeb67803da7b0eedaf"}, + {file = "regex-2023.12.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:518440c991f514331f4850a63560321f833979d145d7d81186dbe2f19e27ae3d"}, + {file = "regex-2023.12.25-cp312-cp312-win32.whl", hash = "sha256:e2610e9406d3b0073636a3a2e80db05a02f0c3169b5632022b4e81c0364bcda5"}, + {file = "regex-2023.12.25-cp312-cp312-win_amd64.whl", hash = "sha256:cc37b9aeebab425f11f27e5e9e6cf580be7206c6582a64467a14dda211abc232"}, + {file = "regex-2023.12.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:da695d75ac97cb1cd725adac136d25ca687da4536154cdc2815f576e4da11c69"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d126361607b33c4eb7b36debc173bf25d7805847346dd4d99b5499e1fef52bc7"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4719bb05094d7d8563a450cf8738d2e1061420f79cfcc1fa7f0a44744c4d8f73"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dd58946bce44b53b06d94aa95560d0b243eb2fe64227cba50017a8d8b3cd3e2"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22a86d9fff2009302c440b9d799ef2fe322416d2d58fc124b926aa89365ec482"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2aae8101919e8aa05ecfe6322b278f41ce2994c4a430303c4cd163fef746e04f"}, + {file = "regex-2023.12.25-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e692296c4cc2873967771345a876bcfc1c547e8dd695c6b89342488b0ea55cd8"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:263ef5cc10979837f243950637fffb06e8daed7f1ac1e39d5910fd29929e489a"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d6f7e255e5fa94642a0724e35406e6cb7001c09d476ab5fce002f652b36d0c39"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:88ad44e220e22b63b0f8f81f007e8abbb92874d8ced66f32571ef8beb0643b2b"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:3a17d3ede18f9cedcbe23d2daa8a2cd6f59fe2bf082c567e43083bba3fb00347"}, + {file = "regex-2023.12.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d15b274f9e15b1a0b7a45d2ac86d1f634d983ca40d6b886721626c47a400bf39"}, + {file = "regex-2023.12.25-cp37-cp37m-win32.whl", hash = 
"sha256:ed19b3a05ae0c97dd8f75a5d8f21f7723a8c33bbc555da6bbe1f96c470139d3c"}, + {file = "regex-2023.12.25-cp37-cp37m-win_amd64.whl", hash = "sha256:a6d1047952c0b8104a1d371f88f4ab62e6275567d4458c1e26e9627ad489b445"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b43523d7bc2abd757119dbfb38af91b5735eea45537ec6ec3a5ec3f9562a1c53"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:efb2d82f33b2212898f1659fb1c2e9ac30493ac41e4d53123da374c3b5541e64"}, + {file = "regex-2023.12.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7fca9205b59c1a3d5031f7e64ed627a1074730a51c2a80e97653e3e9fa0d415"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086dd15e9435b393ae06f96ab69ab2d333f5d65cbe65ca5a3ef0ec9564dfe770"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e81469f7d01efed9b53740aedd26085f20d49da65f9c1f41e822a33992cb1590"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34e4af5b27232f68042aa40a91c3b9bb4da0eeb31b7632e0091afc4310afe6cb"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9852b76ab558e45b20bf1893b59af64a28bd3820b0c2efc80e0a70a4a3ea51c1"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff100b203092af77d1a5a7abe085b3506b7eaaf9abf65b73b7d6905b6cb76988"}, + {file = "regex-2023.12.25-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc038b2d8b1470364b1888a98fd22d616fba2b6309c5b5f181ad4483e0017861"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:094ba386bb5c01e54e14434d4caabf6583334090865b23ef58e0424a6286d3dc"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5cd05d0f57846d8ba4b71d9c00f6f37d6b97d5e5ef8b3c3840426a475c8f70f4"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9aa1a67bbf0f957bbe096375887b2505f5d8ae16bf04488e8b0f334c36e31360"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:98a2636994f943b871786c9e82bfe7883ecdaba2ef5df54e1450fa9869d1f756"}, + {file = "regex-2023.12.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37f8e93a81fc5e5bd8db7e10e62dc64261bcd88f8d7e6640aaebe9bc180d9ce2"}, + {file = "regex-2023.12.25-cp38-cp38-win32.whl", hash = "sha256:d78bd484930c1da2b9679290a41cdb25cc127d783768a0369d6b449e72f88beb"}, + {file = "regex-2023.12.25-cp38-cp38-win_amd64.whl", hash = "sha256:b521dcecebc5b978b447f0f69b5b7f3840eac454862270406a39837ffae4e697"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f7bc09bc9c29ebead055bcba136a67378f03d66bf359e87d0f7c759d6d4ffa31"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e14b73607d6231f3cc4622809c196b540a6a44e903bcfad940779c80dffa7be7"}, + {file = "regex-2023.12.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9eda5f7a50141291beda3edd00abc2d4a5b16c29c92daf8d5bd76934150f3edc"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc6bb9aa69aacf0f6032c307da718f61a40cf970849e471254e0e91c56ffca95"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298dc6354d414bc921581be85695d18912bea163a8b23cac9a2562bbcd5088b1"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f4e475a80ecbd15896a976aa0b386c5525d0ed34d5c600b6d3ebac0a67c7ddf"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531ac6cf22b53e0696f8e1d56ce2396311254eb806111ddd3922c9d937151dae"}, + {file = 
"regex-2023.12.25-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f3470f7524b6da61e2020672df2f3063676aff444db1daa283c2ea4ed259d6"}, + {file = "regex-2023.12.25-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:89723d2112697feaa320c9d351e5f5e7b841e83f8b143dba8e2d2b5f04e10923"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0ecf44ddf9171cd7566ef1768047f6e66975788258b1c6c6ca78098b95cf9a3d"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:905466ad1702ed4acfd67a902af50b8db1feeb9781436372261808df7a2a7bca"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:4558410b7a5607a645e9804a3e9dd509af12fb72b9825b13791a37cd417d73a5"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7e316026cc1095f2a3e8cc012822c99f413b702eaa2ca5408a513609488cb62f"}, + {file = "regex-2023.12.25-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3b1de218d5375cd6ac4b5493e0b9f3df2be331e86520f23382f216c137913d20"}, + {file = "regex-2023.12.25-cp39-cp39-win32.whl", hash = "sha256:11a963f8e25ab5c61348d090bf1b07f1953929c13bd2309a0662e9ff680763c9"}, + {file = "regex-2023.12.25-cp39-cp39-win_amd64.whl", hash = "sha256:e693e233ac92ba83a87024e1d32b5f9ab15ca55ddd916d878146f4e3406b5c91"}, + {file = "regex-2023.12.25.tar.gz", hash = "sha256:29171aa128da69afdf4bde412d5bedc335f2ca8fcfe4489038577d05f16181e5"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "roundrobin" +version = "0.0.4" +description = "Collection of roundrobin utilities" +optional = false +python-versions = "*" +files = [ + {file = "roundrobin-0.0.4.tar.gz", hash = 
"sha256:7e9d19a5bd6123d99993fb935fa86d25c88bb2096e493885f61737ed0f5e9abd"}, +] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + +[[package]] +name = "setuptools" +version = "69.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", 
"sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "simple-websocket" +version = "1.0.0" +description = "Simple WebSocket server and client for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "simple-websocket-1.0.0.tar.gz", hash = "sha256:17d2c72f4a2bd85174a97e3e4c88b01c40c3f81b7b648b0cc3ce1305968928c8"}, + {file = "simple_websocket-1.0.0-py3-none-any.whl", hash = "sha256:1d5bf585e415eaa2083e2bcf02a3ecf91f9712e7b3e6b9fa0b461ad04e0837bc"}, +] + +[package.dependencies] +wsproto = "*" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.29" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c142852ae192e9fe5aad5c350ea6befe9db14370b34047e1f0f7cf99e63c63b"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:99a1e69d4e26f71e750e9ad6fdc8614fbddb67cfe2173a3628a2566034e223c7"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef3fbccb4058355053c51b82fd3501a6e13dd808c8d8cd2561e610c5456013c"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6753305936eddc8ed190e006b7bb33a8f50b9854823485eed3a886857ab8d1"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0f3ca96af060a5250a8ad5a63699180bc780c2edf8abf96c58af175921df847a"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c4520047006b1d3f0d89e0532978c0688219857eb2fee7c48052560ae76aca1e"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win32.whl", hash = "sha256:b2a0e3cf0caac2085ff172c3faacd1e00c376e6884b5bc4dd5b6b84623e29e4f"}, + {file = "SQLAlchemy-2.0.29-cp310-cp310-win_amd64.whl", hash = "sha256:01d10638a37460616708062a40c7b55f73e4d35eaa146781c683e0fa7f6c43fb"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:308ef9cb41d099099fffc9d35781638986870b29f744382904bf9c7dadd08513"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:296195df68326a48385e7a96e877bc19aa210e485fa381c5246bc0234c36c78e"}, + {file = 
"SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a13b917b4ffe5a0a31b83d051d60477819ddf18276852ea68037a144a506efb9"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f6d971255d9ddbd3189e2e79d743ff4845c07f0633adfd1de3f63d930dbe673"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:61405ea2d563407d316c63a7b5271ae5d274a2a9fbcd01b0aa5503635699fa1e"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de7202ffe4d4a8c1e3cde1c03e01c1a3772c92858837e8f3879b497158e4cb44"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win32.whl", hash = "sha256:b5d7ed79df55a731749ce65ec20d666d82b185fa4898430b17cb90c892741520"}, + {file = "SQLAlchemy-2.0.29-cp311-cp311-win_amd64.whl", hash = "sha256:205f5a2b39d7c380cbc3b5dcc8f2762fb5bcb716838e2d26ccbc54330775b003"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d96710d834a6fb31e21381c6d7b76ec729bd08c75a25a5184b1089141356171f"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:52de4736404e53c5c6a91ef2698c01e52333988ebdc218f14c833237a0804f1b"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7b02525ede2a164c5fa5014915ba3591730f2cc831f5be9ff3b7fd3e30958e"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dfefdb3e54cd15f5d56fd5ae32f1da2d95d78319c1f6dfb9bcd0eb15d603d5d"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a88913000da9205b13f6f195f0813b6ffd8a0c0c2bd58d499e00a30eb508870c"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fecd5089c4be1bcc37c35e9aa678938d2888845a134dd016de457b942cf5a758"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win32.whl", hash = 
"sha256:8197d6f7a3d2b468861ebb4c9f998b9df9e358d6e1cf9c2a01061cb9b6cf4e41"}, + {file = "SQLAlchemy-2.0.29-cp312-cp312-win_amd64.whl", hash = "sha256:9b19836ccca0d321e237560e475fd99c3d8655d03da80c845c4da20dda31b6e1"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:87a1d53a5382cdbbf4b7619f107cc862c1b0a4feb29000922db72e5a66a5ffc0"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a0732dffe32333211801b28339d2a0babc1971bc90a983e3035e7b0d6f06b93"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90453597a753322d6aa770c5935887ab1fc49cc4c4fdd436901308383d698b4b"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ea311d4ee9a8fa67f139c088ae9f905fcf0277d6cd75c310a21a88bf85e130f5"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5f20cb0a63a3e0ec4e169aa8890e32b949c8145983afa13a708bc4b0a1f30e03"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win32.whl", hash = "sha256:e5bbe55e8552019c6463709b39634a5fc55e080d0827e2a3a11e18eb73f5cdbd"}, + {file = "SQLAlchemy-2.0.29-cp37-cp37m-win_amd64.whl", hash = "sha256:c2f9c762a2735600654c654bf48dad388b888f8ce387b095806480e6e4ff6907"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e614d7a25a43a9f54fcce4675c12761b248547f3d41b195e8010ca7297c369c"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:471fcb39c6adf37f820350c28aac4a7df9d3940c6548b624a642852e727ea586"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:988569c8732f54ad3234cf9c561364221a9e943b78dc7a4aaf35ccc2265f1930"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dddaae9b81c88083e6437de95c41e86823d150f4ee94bf24e158a4526cbead01"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:334184d1ab8f4c87f9652b048af3f7abea1c809dfe526fb0435348a6fef3d380"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38b624e5cf02a69b113c8047cf7f66b5dfe4a2ca07ff8b8716da4f1b3ae81567"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win32.whl", hash = "sha256:bab41acf151cd68bc2b466deae5deeb9e8ae9c50ad113444151ad965d5bf685b"}, + {file = "SQLAlchemy-2.0.29-cp38-cp38-win_amd64.whl", hash = "sha256:52c8011088305476691b8750c60e03b87910a123cfd9ad48576d6414b6ec2a1d"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3071ad498896907a5ef756206b9dc750f8e57352113c19272bdfdc429c7bd7de"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dba622396a3170974f81bad49aacebd243455ec3cc70615aeaef9e9613b5bca5"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b184e3de58009cc0bf32e20f137f1ec75a32470f5fede06c58f6c355ed42a72"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c37f1050feb91f3d6c32f864d8e114ff5545a4a7afe56778d76a9aec62638ba"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bda7ce59b06d0f09afe22c56714c65c957b1068dee3d5e74d743edec7daba552"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25664e18bef6dc45015b08f99c63952a53a0a61f61f2e48a9e70cec27e55f699"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win32.whl", hash = "sha256:77d29cb6c34b14af8a484e831ab530c0f7188f8efed1c6a833a2c674bf3c26ec"}, + {file = "SQLAlchemy-2.0.29-cp39-cp39-win_amd64.whl", hash = "sha256:04c487305ab035a9548f573763915189fc0fe0824d9ba28433196f8436f1449c"}, + {file = "SQLAlchemy-2.0.29-py3-none-any.whl", hash = "sha256:dc4ee2d4ee43251905f88637d5281a8d52e916a021384ec10758826f5cbae305"}, + {file = "SQLAlchemy-2.0.29.tar.gz", hash = "sha256:bd9566b8e58cabd700bc367b60e90d9349cd16f0984973f98a9a09f9c64e86f0"}, +] + +[package.dependencies] +greenlet = 
{version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] +aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlmodel" +version = "0.0.14" +description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." 
+optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "sqlmodel-0.0.14-py3-none-any.whl", hash = "sha256:accea3ff5d878e41ac439b11e78613ed61ce300cfcb860e87a2d73d4884cbee4"}, + {file = "sqlmodel-0.0.14.tar.gz", hash = "sha256:0bff8fc94af86b44925aa813f56cf6aabdd7f156b73259f2f60692c6a64ac90e"}, +] + +[package.dependencies] +pydantic = ">=1.10.13,<3.0.0" +SQLAlchemy = ">=2.0.0,<2.1.0" + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "starlette" +version = "0.36.3" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"}, + {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] + +[[package]] +name = "sympy" +version = "1.12" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, + {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, +] + +[package.dependencies] +mpmath = ">=0.19" + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "termcolor" +version = "2.4.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.8" +files = [ + {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, + {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "tiktoken" +version = "0.6.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"}, + {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"}, + {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"}, + {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"}, + {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"}, + {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"}, + {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tokenizers" +version = "0.15.2" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + 
{file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash 
= "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = "sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = 
"tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = 
"tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = "sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = 
"tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, +] + +[package.dependencies] +huggingface_hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", 
hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tornado" +version = "6.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = 
"tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.14.2" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, + {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "typer" +version = "0.9.4" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, + {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "types-google-cloud-ndb" +version = "2.3.0.20240311" +description = "Typing stubs for google-cloud-ndb" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-google-cloud-ndb-2.3.0.20240311.tar.gz", hash = "sha256:c37a149f313827d9443a0f7b8dfd572292f9d9dabb8a9c4d68cdba81689a380f"}, + {file = "types_google_cloud_ndb-2.3.0.20240311-py3-none-any.whl", hash = "sha256:8209962a420d2c60615ee26bc21ad74d77a3e337045b70ed86843a974f2d2ecd"}, +] + +[[package]] +name = "types-passlib" +version = "1.7.7.20240327" +description = "Typing stubs for passlib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-passlib-1.7.7.20240327.tar.gz", hash = "sha256:4cce6a1a3a6afee9fc4728b4d9784300764ac2be747f5bcc01646d904b85f4bb"}, + {file = "types_passlib-1.7.7.20240327-py3-none-any.whl", hash = "sha256:3a3b7f4258b71034d2e2f4f307d6810f9904f906cdf375514c8bdbdb28a4ad23"}, +] + +[[package]] +name = 
"types-pillow" +version = "9.5.0.6" +description = "Typing stubs for Pillow" +optional = false +python-versions = "*" +files = [ + {file = "types-Pillow-9.5.0.6.tar.gz", hash = "sha256:6a0cad40686e5c35fe7ada70f52bd3970395d31ece33486609e5420e820a9e4e"}, + {file = "types_Pillow-9.5.0.6-py3-none-any.whl", hash = "sha256:1d238abaa9d529b04941d805b7f4d3f7df30702bb14521ec507617f117406fb4"}, +] + +[[package]] +name = "types-pyasn1" +version = "0.6.0.20240402" +description = "Typing stubs for pyasn1" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pyasn1-0.6.0.20240402.tar.gz", hash = "sha256:5d54dcb33f69dd269071ca098e923ac20c5f03c814631fa7f3ed9ee035a5da3a"}, + {file = "types_pyasn1-0.6.0.20240402-py3-none-any.whl", hash = "sha256:848d01e7313c200acc035a8b3d377fe7b2aecbe77f2be49eb160a7f82835aaaf"}, +] + +[[package]] +name = "types-pyopenssl" +version = "24.0.0.20240311" +description = "Typing stubs for pyOpenSSL" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pyOpenSSL-24.0.0.20240311.tar.gz", hash = "sha256:7bca00cfc4e7ef9c5d2663c6a1c068c35798e59670595439f6296e7ba3d58083"}, + {file = "types_pyOpenSSL-24.0.0.20240311-py3-none-any.whl", hash = "sha256:6e8e8bfad34924067333232c93f7fc4b369856d8bea0d5c9d1808cb290ab1972"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" + +[[package]] +name = "types-python-jose" +version = "3.3.4.20240106" +description = "Typing stubs for python-jose" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-jose-3.3.4.20240106.tar.gz", hash = "sha256:b18cf8c5080bbfe1ef7c3b707986435d9efca3e90889acb6a06f65e06bc3405a"}, + {file = "types_python_jose-3.3.4.20240106-py3-none-any.whl", hash = "sha256:b515a6c0c61f5e2a53bc93e3a2b024cbd42563e2e19cbde9fd1c2cc2cfe77ccc"}, +] + +[package.dependencies] +types-pyasn1 = "*" + +[[package]] +name = "types-pytz" +version = "2024.1.0.20240203" +description = "Typing stubs for pytz" +optional = false +python-versions = ">=3.8" +files = [ 
+ {file = "types-pytz-2024.1.0.20240203.tar.gz", hash = "sha256:c93751ee20dfc6e054a0148f8f5227b9a00b79c90a4d3c9f464711a73179c89e"}, + {file = "types_pytz-2024.1.0.20240203-py3-none-any.whl", hash = "sha256:9679eef0365db3af91ef7722c199dbb75ee5c1b67e3c4dd7bfbeb1b8a71c21a3"}, +] + +[[package]] +name = "types-pywin32" +version = "306.0.0.20240331" +description = "Typing stubs for pywin32" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-pywin32-306.0.0.20240331.tar.gz", hash = "sha256:3427e60ffbbc47b31e6bc416c3e4c37d579e6774e5152dab953589c9b64c4d58"}, + {file = "types_pywin32-306.0.0.20240331-py3-none-any.whl", hash = "sha256:844b90253f3da38e254c9ce199ba41f63da52c511003daedcda414e88f1cf319"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240311" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240311.tar.gz", hash = "sha256:a9e0f0f88dc835739b0c1ca51ee90d04ca2a897a71af79de9aec5f38cb0a5342"}, + {file = "types_PyYAML-6.0.12.20240311-py3-none-any.whl", hash = "sha256:b845b06a1c7e54b8e5b4c683043de0d9caf205e7434b3edc678ff2411979b8f6"}, +] + +[[package]] +name = "types-redis" +version = "4.6.0.20240311" +description = "Typing stubs for redis" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-redis-4.6.0.20240311.tar.gz", hash = "sha256:e049bbdff0e0a1f8e701b64636811291d21bff79bf1e7850850a44055224a85f"}, + {file = "types_redis-4.6.0.20240311-py3-none-any.whl", hash = "sha256:6b9d68a29aba1ee400c823d8e5fe88675282eb69d7211e72fe65dbe54b33daca"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + +[[package]] +name = "types-requests" +version = "2.31.0.20240403" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240403.tar.gz", hash = "sha256:e1e0cd0b655334f39d9f872b68a1310f0e343647688bf2cee932ec4c2b04de59"}, + {file = 
"types_requests-2.31.0.20240403-py3-none-any.whl", hash = "sha256:06abf6a68f5c4f2a62f6bb006672dfb26ed50ccbfddb281e1ee6f09a65707d5d"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.27.1" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4"}, + {file = "uvicorn-0.27.1.tar.gz", hash = "sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""} +h11 = ">=0.8" +httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} +python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} +websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets 
(>=10.4)"] + +[[package]] +name = "uvloop" +version = "0.19.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de4313d7f575474c8f5a12e163f6d89c0a878bc49219641d49e6f1444369a90e"}, + {file = "uvloop-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5588bd21cf1fcf06bded085f37e43ce0e00424197e7c10e77afd4bbefffef428"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1fd71c3843327f3bbc3237bedcdb6504fd50368ab3e04d0410e52ec293f5b8"}, + {file = "uvloop-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a05128d315e2912791de6088c34136bfcdd0c7cbc1cf85fd6fd1bb321b7c849"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:cd81bdc2b8219cb4b2556eea39d2e36bfa375a2dd021404f90a62e44efaaf957"}, + {file = "uvloop-0.19.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f17766fb6da94135526273080f3455a112f82570b2ee5daa64d682387fe0dcd"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4ce6b0af8f2729a02a5d1575feacb2a94fc7b2e983868b009d51c9a9d2149bef"}, + {file = "uvloop-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:31e672bb38b45abc4f26e273be83b72a0d28d074d5b370fc4dcf4c4eb15417d2"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:570fc0ed613883d8d30ee40397b79207eedd2624891692471808a95069a007c1"}, + {file = "uvloop-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5138821e40b0c3e6c9478643b4660bd44372ae1e16a322b8fc07478f92684e24"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:91ab01c6cd00e39cde50173ba4ec68a1e578fee9279ba64f5221810a9e786533"}, + {file = "uvloop-0.19.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:47bf3e9312f63684efe283f7342afb414eea4d3011542155c7e625cd799c3b12"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:da8435a3bd498419ee8c13c34b89b5005130a476bda1d6ca8cfdde3de35cd650"}, + {file = "uvloop-0.19.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:02506dc23a5d90e04d4f65c7791e65cf44bd91b37f24cfc3ef6cf2aff05dc7ec"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693049be9d36fef81741fddb3f441673ba12a34a704e7b4361efb75cf30befc"}, + {file = "uvloop-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7010271303961c6f0fe37731004335401eb9075a12680738731e9c92ddd96ad6"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5daa304d2161d2918fa9a17d5635099a2f78ae5b5960e742b2fcfbb7aefaa593"}, + {file = "uvloop-0.19.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7207272c9520203fea9b93843bb775d03e1cf88a80a936ce760f60bb5add92f3"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:78ab247f0b5671cc887c31d33f9b3abfb88d2614b84e4303f1a63b46c046c8bd"}, + {file = "uvloop-0.19.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:472d61143059c84947aa8bb74eabbace30d577a03a1805b77933d6bd13ddebbd"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45bf4c24c19fb8a50902ae37c5de50da81de4922af65baf760f7c0c42e1088be"}, + {file = "uvloop-0.19.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:271718e26b3e17906b28b67314c45d19106112067205119dddbd834c2b7ce797"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:34175c9fd2a4bc3adc1380e1261f60306344e3407c20a4d684fd5f3be010fa3d"}, + {file = "uvloop-0.19.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e27f100e1ff17f6feeb1f33968bc185bf8ce41ca557deee9d9bbbffeb72030b7"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:13dfdf492af0aa0a0edf66807d2b465607d11c4fa48f4a1fd41cbea5b18e8e8b"}, + {file = "uvloop-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e3d4e85ac060e2342ff85e90d0c04157acb210b9ce508e784a944f852a40e67"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca4956c9ab567d87d59d49fa3704cf29e37109ad348f2d5223c9bf761a332e7"}, + {file = "uvloop-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f467a5fd23b4fc43ed86342641f3936a68ded707f4627622fa3f82a120e18256"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:492e2c32c2af3f971473bc22f086513cedfc66a130756145a931a90c3958cb17"}, + {file = "uvloop-0.19.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2df95fca285a9f5bfe730e51945ffe2fa71ccbfdde3b0da5772b4ee4f2e770d5"}, + {file = "uvloop-0.19.0.tar.gz", hash = "sha256:0246f4fd1bf2bf702e06b0d45ee91677ee5c31242f39aab4ea6fe0c51aedd0fd"}, +] + +[package.extras] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "watchfiles" +version = "0.21.0" +description = "Simple, modern and high performance file watching and code reload in python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "watchfiles-0.21.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:27b4035013f1ea49c6c0b42d983133b136637a527e48c132d368eb19bf1ac6aa"}, + {file = "watchfiles-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c81818595eff6e92535ff32825f31c116f867f64ff8cdf6562cd1d6b2e1e8f3e"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c107ea3cf2bd07199d66f156e3ea756d1b84dfd43b542b2d870b77868c98c03"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d9ac347653ebd95839a7c607608703b20bc07e577e870d824fa4801bc1cb124"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5eb86c6acb498208e7663ca22dbe68ca2cf42ab5bf1c776670a50919a56e64ab"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f564bf68404144ea6b87a78a3f910cc8de216c6b12a4cf0b27718bf4ec38d303"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d0f32ebfaa9c6011f8454994f86108c2eb9c79b8b7de00b36d558cadcedaa3d"}, + {file = "watchfiles-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d45d9b699ecbac6c7bd8e0a2609767491540403610962968d258fd6405c17c"}, + {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:aff06b2cac3ef4616e26ba17a9c250c1fe9dd8a5d907d0193f84c499b1b6e6a9"}, + {file = "watchfiles-0.21.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d9792dff410f266051025ecfaa927078b94cc7478954b06796a9756ccc7e14a9"}, + {file = "watchfiles-0.21.0-cp310-none-win32.whl", hash = "sha256:214cee7f9e09150d4fb42e24919a1e74d8c9b8a9306ed1474ecaddcd5479c293"}, + {file = "watchfiles-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:1ad7247d79f9f55bb25ab1778fd47f32d70cf36053941f07de0b7c4e96b5d235"}, + {file = 
"watchfiles-0.21.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:668c265d90de8ae914f860d3eeb164534ba2e836811f91fecc7050416ee70aa7"}, + {file = "watchfiles-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a23092a992e61c3a6a70f350a56db7197242f3490da9c87b500f389b2d01eef"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e7941bbcfdded9c26b0bf720cb7e6fd803d95a55d2c14b4bd1f6a2772230c586"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11cd0c3100e2233e9c53106265da31d574355c288e15259c0d40a4405cbae317"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78f30cbe8b2ce770160d3c08cff01b2ae9306fe66ce899b73f0409dc1846c1b"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6674b00b9756b0af620aa2a3346b01f8e2a3dc729d25617e1b89cf6af4a54eb1"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd7ac678b92b29ba630d8c842d8ad6c555abda1b9ef044d6cc092dacbfc9719d"}, + {file = "watchfiles-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c873345680c1b87f1e09e0eaf8cf6c891b9851d8b4d3645e7efe2ec20a20cc7"}, + {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:49f56e6ecc2503e7dbe233fa328b2be1a7797d31548e7a193237dcdf1ad0eee0"}, + {file = "watchfiles-0.21.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:02d91cbac553a3ad141db016e3350b03184deaafeba09b9d6439826ee594b365"}, + {file = "watchfiles-0.21.0-cp311-none-win32.whl", hash = "sha256:ebe684d7d26239e23d102a2bad2a358dedf18e462e8808778703427d1f584400"}, + {file = "watchfiles-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:4566006aa44cb0d21b8ab53baf4b9c667a0ed23efe4aaad8c227bfba0bf15cbe"}, + {file = "watchfiles-0.21.0-cp311-none-win_arm64.whl", hash = 
"sha256:c550a56bf209a3d987d5a975cdf2063b3389a5d16caf29db4bdddeae49f22078"}, + {file = "watchfiles-0.21.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:51ddac60b96a42c15d24fbdc7a4bfcd02b5a29c047b7f8bf63d3f6f5a860949a"}, + {file = "watchfiles-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:511f0b034120cd1989932bf1e9081aa9fb00f1f949fbd2d9cab6264916ae89b1"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cfb92d49dbb95ec7a07511bc9efb0faff8fe24ef3805662b8d6808ba8409a71a"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f92944efc564867bbf841c823c8b71bb0be75e06b8ce45c084b46411475a915"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:642d66b75eda909fd1112d35c53816d59789a4b38c141a96d62f50a3ef9b3360"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d23bcd6c8eaa6324fe109d8cac01b41fe9a54b8c498af9ce464c1aeeb99903d6"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18d5b4da8cf3e41895b34e8c37d13c9ed294954907929aacd95153508d5d89d7"}, + {file = "watchfiles-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8d1eae0f65441963d805f766c7e9cd092f91e0c600c820c764a4ff71a0764c"}, + {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1fd9a5205139f3c6bb60d11f6072e0552f0a20b712c85f43d42342d162be1235"}, + {file = "watchfiles-0.21.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a1e3014a625bcf107fbf38eece0e47fa0190e52e45dc6eee5a8265ddc6dc5ea7"}, + {file = "watchfiles-0.21.0-cp312-none-win32.whl", hash = "sha256:9d09869f2c5a6f2d9df50ce3064b3391d3ecb6dced708ad64467b9e4f2c9bef3"}, + {file = "watchfiles-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:18722b50783b5e30a18a8a5db3006bab146d2b705c92eb9a94f78c72beb94094"}, + {file = 
"watchfiles-0.21.0-cp312-none-win_arm64.whl", hash = "sha256:a3b9bec9579a15fb3ca2d9878deae789df72f2b0fdaf90ad49ee389cad5edab6"}, + {file = "watchfiles-0.21.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:4ea10a29aa5de67de02256a28d1bf53d21322295cb00bd2d57fcd19b850ebd99"}, + {file = "watchfiles-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:40bca549fdc929b470dd1dbfcb47b3295cb46a6d2c90e50588b0a1b3bd98f429"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b37a7ba223b2f26122c148bb8d09a9ff312afca998c48c725ff5a0a632145f7"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec8c8900dc5c83650a63dd48c4d1d245343f904c4b64b48798c67a3767d7e165"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ad3fe0a3567c2f0f629d800409cd528cb6251da12e81a1f765e5c5345fd0137"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d353c4cfda586db2a176ce42c88f2fc31ec25e50212650c89fdd0f560ee507b"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83a696da8922314ff2aec02987eefb03784f473281d740bf9170181829133765"}, + {file = "watchfiles-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a03651352fc20975ee2a707cd2d74a386cd303cc688f407296064ad1e6d1562"}, + {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ad692bc7792be8c32918c699638b660c0de078a6cbe464c46e1340dadb94c19"}, + {file = "watchfiles-0.21.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06247538e8253975bdb328e7683f8515ff5ff041f43be6c40bff62d989b7d0b0"}, + {file = "watchfiles-0.21.0-cp38-none-win32.whl", hash = "sha256:9a0aa47f94ea9a0b39dd30850b0adf2e1cd32a8b4f9c7aa443d852aacf9ca214"}, + {file = "watchfiles-0.21.0-cp38-none-win_amd64.whl", hash = 
"sha256:8d5f400326840934e3507701f9f7269247f7c026d1b6cfd49477d2be0933cfca"}, + {file = "watchfiles-0.21.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7f762a1a85a12cc3484f77eee7be87b10f8c50b0b787bb02f4e357403cad0c0e"}, + {file = "watchfiles-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6e9be3ef84e2bb9710f3f777accce25556f4a71e15d2b73223788d528fcc2052"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4c48a10d17571d1275701e14a601e36959ffada3add8cdbc9e5061a6e3579a5d"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c889025f59884423428c261f212e04d438de865beda0b1e1babab85ef4c0f01"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66fac0c238ab9a2e72d026b5fb91cb902c146202bbd29a9a1a44e8db7b710b6f"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a21f71885aa2744719459951819e7bf5a906a6448a6b2bbce8e9cc9f2c8128"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c9198c989f47898b2c22201756f73249de3748e0fc9de44adaf54a8b259cc0c"}, + {file = "watchfiles-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f57c4461cd24fda22493109c45b3980863c58a25b8bec885ca8bea6b8d4b28"}, + {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:853853cbf7bf9408b404754b92512ebe3e3a83587503d766d23e6bf83d092ee6"}, + {file = "watchfiles-0.21.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d5b1dc0e708fad9f92c296ab2f948af403bf201db8fb2eb4c8179db143732e49"}, + {file = "watchfiles-0.21.0-cp39-none-win32.whl", hash = "sha256:59137c0c6826bd56c710d1d2bda81553b5e6b7c84d5a676747d80caf0409ad94"}, + {file = "watchfiles-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:6cb8fdc044909e2078c248986f2fc76f911f72b51ea4a4fbbf472e01d14faa58"}, + {file = 
"watchfiles-0.21.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab03a90b305d2588e8352168e8c5a1520b721d2d367f31e9332c4235b30b8994"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:927c589500f9f41e370b0125c12ac9e7d3a2fd166b89e9ee2828b3dda20bfe6f"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd467213195e76f838caf2c28cd65e58302d0254e636e7c0fca81efa4a2e62c"}, + {file = "watchfiles-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02b73130687bc3f6bb79d8a170959042eb56eb3a42df3671c79b428cd73f17cc"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:08dca260e85ffae975448e344834d765983237ad6dc308231aa16e7933db763e"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ccceb50c611c433145502735e0370877cced72a6c70fd2410238bcbc7fe51d8"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57d430f5fb63fea141ab71ca9c064e80de3a20b427ca2febcbfcef70ff0ce895"}, + {file = "watchfiles-0.21.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd5fad9b9c0dd89904bbdea978ce89a2b692a7ee8a0ce19b940e538c88a809c"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:be6dd5d52b73018b21adc1c5d28ac0c68184a64769052dfeb0c5d9998e7f56a2"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b3cab0e06143768499384a8a5efb9c4dc53e19382952859e4802f294214f36ec"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6ed10c2497e5fedadf61e465b3ca12a19f96004c15dcffe4bd442ebadc2d85"}, + {file = "watchfiles-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43babacef21c519bc6631c5fce2a61eccdfc011b4bcb9047255e9620732c8097"}, + 
{file = "watchfiles-0.21.0.tar.gz", hash = "sha256:c76c635fabf542bb78524905718c39f736a98e5ab25b23ec6d4abede1a85a6a3"}, +] + +[package.dependencies] +anyio = ">=3.0.0" + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "websockets" +version = "12.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = 
"websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[[package]] +name = "werkzeug" +version = "3.0.2" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.2-py3-none-any.whl", hash = "sha256:3aac3f5da756f93030740bc235d3e09449efcf65f2f55e3602e1d851b8f48795"}, + {file = "werkzeug-3.0.2.tar.gz", hash = "sha256:e39b645a6ac92822588e7b39a692e7828724ceae0b0d702ef96701f90e70128d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = 
"sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = 
"sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.18.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", 
"pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "zope-event" +version = "5.0" +description = "Very basic event publishing system" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, + {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx"] +test = ["zope.testrunner"] + +[[package]] +name = "zope-interface" +version = "6.2" +description = "Interfaces for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.interface-6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:506f5410b36e5ba494136d9fa04c548eaf1a0d9c442b0b0e7a0944db7620e0ab"}, + {file = "zope.interface-6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b386b8b9d2b6a5e1e4eadd4e62335571244cb9193b7328c2b6e38b64cfda4f0e"}, + {file = "zope.interface-6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb0b3f2cb606981c7432f690db23506b1db5899620ad274e29dbbbdd740e797"}, + {file = "zope.interface-6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7916380abaef4bb4891740879b1afcba2045aee51799dfd6d6ca9bdc71f35f"}, + {file = "zope.interface-6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b240883fb43160574f8f738e6d09ddbdbf8fa3e8cea051603d9edfd947d9328"}, + {file = "zope.interface-6.2-cp310-cp310-win_amd64.whl", hash = "sha256:8af82afc5998e1f307d5e72712526dba07403c73a9e287d906a8aa2b1f2e33dd"}, + {file = "zope.interface-6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:4d45d2ba8195850e3e829f1f0016066a122bfa362cc9dc212527fc3d51369037"}, + {file = "zope.interface-6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:76e0531d86523be7a46e15d379b0e975a9db84316617c0efe4af8338dc45b80c"}, + {file = "zope.interface-6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59f7374769b326a217d0b2366f1c176a45a4ff21e8f7cebb3b4a3537077eff85"}, + {file = "zope.interface-6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25e0af9663eeac6b61b231b43c52293c2cb7f0c232d914bdcbfd3e3bd5c182ad"}, + {file = "zope.interface-6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e02a6fc1772b458ebb6be1c276528b362041217b9ca37e52ecea2cbdce9fac"}, + {file = "zope.interface-6.2-cp311-cp311-win_amd64.whl", hash = "sha256:02adbab560683c4eca3789cc0ac487dcc5f5a81cc48695ec247f00803cafe2fe"}, + {file = "zope.interface-6.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8f5d2c39f3283e461de3655e03faf10e4742bb87387113f787a7724f32db1e48"}, + {file = "zope.interface-6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:75d2ec3d9b401df759b87bc9e19d1b24db73083147089b43ae748aefa63067ef"}, + {file = "zope.interface-6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa994e8937e8ccc7e87395b7b35092818905cf27c651e3ff3e7f29729f5ce3ce"}, + {file = "zope.interface-6.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ede888382882f07b9e4cd942255921ffd9f2901684198b88e247c7eabd27a000"}, + {file = "zope.interface-6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2606955a06c6852a6cff4abeca38346ed01e83f11e960caa9a821b3626a4467b"}, + {file = "zope.interface-6.2-cp312-cp312-win_amd64.whl", hash = "sha256:ac7c2046d907e3b4e2605a130d162b1b783c170292a11216479bb1deb7cadebe"}, + 
{file = "zope.interface-6.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:febceb04ee7dd2aef08c2ff3d6f8a07de3052fc90137c507b0ede3ea80c21440"}, + {file = "zope.interface-6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fc711acc4a1c702ca931fdbf7bf7c86f2a27d564c85c4964772dadf0e3c52f5"}, + {file = "zope.interface-6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:396f5c94654301819a7f3a702c5830f0ea7468d7b154d124ceac823e2419d000"}, + {file = "zope.interface-6.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd374927c00764fcd6fe1046bea243ebdf403fba97a937493ae4be2c8912c2b"}, + {file = "zope.interface-6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a3046e8ab29b590d723821d0785598e0b2e32b636a0272a38409be43e3ae0550"}, + {file = "zope.interface-6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de125151a53ecdb39df3cb3deb9951ed834dd6a110a9e795d985b10bb6db4532"}, + {file = "zope.interface-6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f444de0565db46d26c9fa931ca14f497900a295bd5eba480fc3fad25af8c763e"}, + {file = "zope.interface-6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2fefad268ff5c5b314794e27e359e48aeb9c8bb2cbb5748a071757a56f6bb8f"}, + {file = "zope.interface-6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97785604824981ec8c81850dd25c8071d5ce04717a34296eeac771231fbdd5cd"}, + {file = "zope.interface-6.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7b2bed4eea047a949296e618552d3fed00632dc1b795ee430289bdd0e3717f3"}, + {file = "zope.interface-6.2-cp38-cp38-win_amd64.whl", hash = "sha256:d54f66c511ea01b9ef1d1a57420a93fbb9d48a08ec239f7d9c581092033156d0"}, + {file = "zope.interface-6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:5ee9789a20b0081dc469f65ff6c5007e67a940d5541419ca03ef20c6213dd099"}, + {file = "zope.interface-6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af27b3fe5b6bf9cd01b8e1c5ddea0a0d0a1b8c37dc1c7452f1e90bf817539c6d"}, + {file = "zope.interface-6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bce517b85f5debe07b186fc7102b332676760f2e0c92b7185dd49c138734b70"}, + {file = "zope.interface-6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ae9793f114cee5c464cc0b821ae4d36e1eba961542c6086f391a61aee167b6f"}, + {file = "zope.interface-6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e87698e2fea5ca2f0a99dff0a64ce8110ea857b640de536c76d92aaa2a91ff3a"}, + {file = "zope.interface-6.2-cp39-cp39-win_amd64.whl", hash = "sha256:b66335bbdbb4c004c25ae01cc4a54fd199afbc1fd164233813c6d3c2293bb7e1"}, + {file = "zope.interface-6.2.tar.gz", hash = "sha256:3b6c62813c63c543a06394a636978b22dffa8c5410affc9331ce6cdb5bfa8565"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx_rtd_theme"] +test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] + +[extras] +all = [] +deploy = [] +local = [] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.10,<3.12" +content-hash = "27adc9d6515d9e92ee01a6aae0c9a8162aa403456134ab25a8dd98909ecbe5f2" diff --git a/src/backend/base/pyproject.toml b/src/backend/base/pyproject.toml new file mode 100644 index 000000000..9b164e369 --- /dev/null +++ b/src/backend/base/pyproject.toml @@ -0,0 +1,120 @@ +[tool.poetry] +name = "langflow-base" +version = "0.0.18" +description = "A Python package with a built-in web application" +authors = ["Logspace "] +maintainers = [ + "Carlos Coelho ", + "Cristhian Zanforlin ", + "Gabriel Almeida ", + "Gustavo Schaedler ", + "Igor 
Carvalho ", + "Lucas Eduoli ", + "Otávio Anovazzi ", + "Rodrigo Nader ", +] +repository = "https://github.com/logspace-ai/langflow" +license = "MIT" +readme = "README.md" +keywords = ["nlp", "langchain", "openai", "gpt", "gui"] +packages = [{ include = "langflow" }, { include = "langflow/py.typed" }] +include = ["pyproject.toml", "README.md", "langflow/**/*"] +documentation = "https://docs.langflow.org" + + +[tool.poetry.scripts] +langflow-base = "langflow.__main__:main" + +[tool.poetry.dependencies] +python = ">=3.10,<3.12" +fastapi = "^0.109.0" +httpx = "*" +uvicorn = "^0.27.0" +gunicorn = "^21.2.0" +langchain = "~0.1.0" +sqlmodel = "^0.0.14" +loguru = "^0.7.1" +rich = "^13.7.0" +langchain-experimental = "*" +pydantic = "^2.5.0" +pydantic-settings = "^2.1.0" +websockets = "*" +typer = "^0.9.0" +cachetools = "^5.3.1" +platformdirs = "^4.2.0" +python-multipart = "^0.0.7" +orjson = "3.9.15" +alembic = "^1.13.0" +passlib = "^1.7.4" +bcrypt = "4.0.1" +pillow = "^10.2.0" +docstring-parser = "^0.15" +python-jose = "^3.3.0" +pandas = "2.2.0" +multiprocess = "^0.70.14" +duckdb = "^0.9.2" +python-socketio = "^5.11.0" +python-docx = "^1.1.0" +jq = { version = "^1.7.0", markers = "sys_platform != 'win32'" } +pypdf = "^4.1.0" +chromadb = "^0.4.24" +langchain-anthropic = "^0.1.4" +langchain-astradb = "^0.1.0" +nest-asyncio = "^1.6.0" +emoji = "^2.11.0" +cryptography = "^42.0.5" +langchain-openai = "^0.1.1" + + +[tool.poetry.group.dev.dependencies] +pytest-asyncio = "^0.21.1" +types-redis = "^4.6.0.5" +ipykernel = "^6.26.0" +mypy = "^1.7.1" +ruff = "^0.1.5" +httpx = "*" +pytest = "^7.4.2" +types-requests = "^2.31.0" +requests = "^2.31.0" +pytest-cov = "^4.1.0" +pandas-stubs = "^2.0.0.230412" +types-pillow = "^9.5.0.2" +types-pyyaml = "^6.0.12.8" +types-python-jose = "^3.3.4.8" +types-passlib = "^1.7.7.13" +locust = "^2.16.1" +pytest-mock = "^3.11.1" +pytest-xdist = "^3.3.1" +types-pywin32 = "^306.0.0.4" +types-google-cloud-ndb = "^2.2.0.0" +pytest-sugar = "^0.9.7" + + 
+[tool.poetry.extras] +deploy = ["celery", "redis", "flower"] +local = ["llama-cpp-python", "sentence-transformers", "ctransformers"] +all = ["deploy", "local"] + + +[tool.pytest.ini_options] +minversion = "6.0" +addopts = "-ra" +testpaths = ["tests", "integration"] +console_output_style = "progress" +filterwarnings = ["ignore::DeprecationWarning"] +log_cli = true +markers = ["async_test"] + +[tool.mypy] +namespace_packages = true +mypy_path = "langflow" +ignore_missing_imports = true + + +[tool.ruff] +exclude = ["src/backend/langflow/alembic/*"] +line-length = 120 + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py deleted file mode 100644 index 2b6dd4fb8..000000000 --- a/src/backend/langflow/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -from importlib import metadata - -from langflow.interface.custom.custom_component import CustomComponent - -# Deactivate cache manager for now -# from langflow.services.cache import cache_service -from langflow.processing.load import load_flow_from_json - -try: - __version__ = metadata.version(__package__) -except metadata.PackageNotFoundError: - # Case where package metadata is not available. 
- __version__ = "" -del metadata # optional, avoids polluting the results of dir(__package__) - -__all__ = ["load_flow_from_json", "CustomComponent"] diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py deleted file mode 100644 index 01e4e9c79..000000000 --- a/src/backend/langflow/api/v1/base.py +++ /dev/null @@ -1,160 +0,0 @@ -from typing import Optional - -from langchain.prompts import PromptTemplate -from pydantic import BaseModel, field_validator, model_serializer - -from langflow.interface.utils import extract_input_variables_from_prompt -from langflow.template.frontend_node.base import FrontendNode - - -class CacheResponse(BaseModel): - data: dict - - -class Code(BaseModel): - code: str - - -class FrontendNodeRequest(FrontendNode): - template: dict # type: ignore - - @model_serializer(mode="wrap") - def serialize_model(self, handler): - # Override the default serialization method in FrontendNode - # because we don't need the name in the response (i.e. 
{name: {}}) - return handler(self) - - -class ValidatePromptRequest(BaseModel): - name: str - template: str - # optional for tweak call - frontend_node: Optional[FrontendNodeRequest] = None - - -# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} -class CodeValidationResponse(BaseModel): - imports: dict - function: dict - - @field_validator("imports") - @classmethod - def validate_imports(cls, v): - return v or {"errors": []} - - @field_validator("function") - @classmethod - def validate_function(cls, v): - return v or {"errors": []} - - -class PromptValidationResponse(BaseModel): - input_variables: list - # object return for tweak call - frontend_node: Optional[FrontendNodeRequest] = None - - -INVALID_CHARACTERS = { - " ", - ",", - ".", - ":", - ";", - "!", - "?", - "/", - "\\", - "(", - ")", - "[", - "]", - "{", - "}", -} - -INVALID_NAMES = { - "input_variables", - "output_parser", - "partial_variables", - "template", - "template_format", - "validate_template", -} - - -def validate_prompt(template: str): - input_variables = extract_input_variables_from_prompt(template) - - # Check if there are invalid characters in the input_variables - input_variables = check_input_variables(input_variables) - if any(var in INVALID_NAMES for var in input_variables): - raise ValueError(f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. 
") - - try: - PromptTemplate(template=template, input_variables=input_variables) - except Exception as exc: - raise ValueError(str(exc)) from exc - - return input_variables - - -def check_input_variables(input_variables: list): - invalid_chars = [] - fixed_variables = [] - wrong_variables = [] - empty_variables = [] - for variable in input_variables: - new_var = variable - - # if variable is empty, then we should add that to the wrong variables - if not variable: - empty_variables.append(variable) - continue - - # if variable starts with a number we should add that to the invalid chars - # and wrong variables - if variable[0].isdigit(): - invalid_chars.append(variable[0]) - new_var = new_var.replace(variable[0], "") - wrong_variables.append(variable) - else: - for char in INVALID_CHARACTERS: - if char in variable: - invalid_chars.append(char) - new_var = new_var.replace(char, "") - wrong_variables.append(variable) - fixed_variables.append(new_var) - # If any of the input_variables is not in the fixed_variables, then it means that - # there are invalid characters in the input_variables - - if any(var not in fixed_variables for var in input_variables): - error_message = build_error_message( - input_variables, - invalid_chars, - wrong_variables, - fixed_variables, - empty_variables, - ) - raise ValueError(error_message) - return input_variables - - -def build_error_message(input_variables, invalid_chars, wrong_variables, fixed_variables, empty_variables): - input_variables_str = ", ".join([f"'{var}'" for var in input_variables]) - error_string = f"Invalid input variables: {input_variables_str}. 
" - - if wrong_variables and invalid_chars: - # fix the wrong variables replacing invalid chars and find them in the fixed variables - error_string_vars = "You can fix them by replacing the invalid characters: " - wvars = wrong_variables.copy() - for i, wrong_var in enumerate(wvars): - for char in invalid_chars: - wrong_var = wrong_var.replace(char, "") - if wrong_var in fixed_variables: - error_string_vars += f"'{wrong_variables[i]}' -> '{wrong_var}'" - error_string += error_string_vars - elif empty_variables: - error_string += f" There are {len(empty_variables)} empty variable{'s' if len(empty_variables) > 1 else ''}." - elif len(set(fixed_variables)) != len(fixed_variables): - error_string += "There are duplicate variables." - return error_string diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py deleted file mode 100644 index b909a0209..000000000 --- a/src/backend/langflow/api/v1/chat.py +++ /dev/null @@ -1,245 +0,0 @@ -import time - -from fastapi import ( - APIRouter, - Depends, - HTTPException, - WebSocket, - WebSocketException, - status, -) -from fastapi.responses import StreamingResponse -from loguru import logger -from sqlmodel import Session - -from langflow.api.utils import build_input_keys_response, format_elapsed_time -from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData -from langflow.graph.graph.base import Graph -from langflow.services.auth.utils import ( - get_current_active_user, - get_current_user_for_websocket, -) -from langflow.services.cache.service import BaseCacheService -from langflow.services.cache.utils import update_build_status -from langflow.services.chat.service import ChatService -from langflow.services.deps import get_cache_service, get_chat_service, get_session - -router = APIRouter(tags=["Chat"]) - - -@router.websocket("/chat/{client_id}") -async def chat( - client_id: str, - websocket: WebSocket, - db: Session = Depends(get_session), - chat_service: 
"ChatService" = Depends(get_chat_service), -): - """Websocket endpoint for chat.""" - try: - user = await get_current_user_for_websocket(websocket, db) - await websocket.accept() - if not user: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized") - elif not user.is_active: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized") - - if client_id in chat_service.cache_service: - await chat_service.handle_websocket(client_id, websocket) - else: - # We accept the connection but close it immediately - # if the flow is not built yet - message = "Please, build the flow before sending messages" - await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message) - except WebSocketException as exc: - logger.error(f"Websocket exrror: {exc}") - await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc)) - except Exception as exc: - logger.error(f"Error in chat websocket: {exc}") - messsage = exc.detail if isinstance(exc, HTTPException) else str(exc) - if "Could not validate credentials" in str(exc): - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized") - else: - await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=messsage) - - -@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201) -async def init_build( - graph_data: dict, - flow_id: str, - current_user=Depends(get_current_active_user), - chat_service: "ChatService" = Depends(get_chat_service), - cache_service: "BaseCacheService" = Depends(get_cache_service), -): - """Initialize the build by storing graph data and returning a unique session ID.""" - try: - if flow_id is None: - raise ValueError("No ID provided") - # Check if already building - if ( - flow_id in cache_service - and isinstance(cache_service[flow_id], dict) - and cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS - ): - return InitResponse(flowId=flow_id) - - # Delete from cache if already 
exists - if flow_id in chat_service.cache_service: - chat_service.cache_service.delete(flow_id) - logger.debug(f"Deleted flow {flow_id} from cache") - cache_service[flow_id] = { - "graph_data": graph_data, - "status": BuildStatus.STARTED, - "user_id": current_user.id, - } - - return InitResponse(flowId=flow_id) - except Exception as exc: - logger.error(f"Error initializing build: {exc}") - return HTTPException(status_code=500, detail=str(exc)) - - -@router.get("/build/{flow_id}/status", response_model=BuiltResponse) -async def build_status(flow_id: str, cache_service: "BaseCacheService" = Depends(get_cache_service)): - """Check the flow_id is in the cache_service.""" - try: - built = flow_id in cache_service and cache_service[flow_id]["status"] == BuildStatus.SUCCESS - - return BuiltResponse( - built=built, - ) - - except Exception as exc: - logger.error(f"Error checking build status: {exc}") - return HTTPException(status_code=500, detail=str(exc)) - - -@router.get("/build/stream/{flow_id}", response_class=StreamingResponse) -async def stream_build( - flow_id: str, - chat_service: "ChatService" = Depends(get_chat_service), - cache_service: "BaseCacheService" = Depends(get_cache_service), -): - """Stream the build process based on stored flow data.""" - - async def event_stream(flow_id): - final_response = {"end_of_stream": True} - artifacts = {} - flow_cache = cache_service[flow_id] - flow_cache = flow_cache if isinstance(flow_cache, dict) else {} - try: - if flow_id not in cache_service: - error_message = "Invalid session ID" - yield str(StreamData(event="error", data={"error": error_message})) - return - - if flow_cache.get("status") == BuildStatus.IN_PROGRESS: - error_message = "Already building" - yield str(StreamData(event="error", data={"error": error_message})) - return - - graph_data = flow_cache.get("graph_data") - - if not graph_data: - error_message = "No data provided" - yield str(StreamData(event="error", data={"error": error_message})) - return - - 
logger.debug("Building langchain object") - - # Some error could happen when building the graph - graph = Graph.from_payload(graph_data) - - number_of_nodes = len(graph.vertices) - update_build_status(cache_service, flow_id, BuildStatus.IN_PROGRESS) - time_elapsed = "" - try: - user_id = flow_cache["user_id"] - except KeyError: - logger.debug("No user_id found in cache_service") - user_id = None - for i, vertex in enumerate(graph.generator_build(), 1): - start_time = time.perf_counter() - try: - log_dict = { - "log": f"Building node {vertex.vertex_type}", - } - yield str(StreamData(event="log", data=log_dict)) - if vertex.is_task: - vertex = await try_running_celery_task(vertex, user_id) - else: - await vertex.build(user_id=user_id) - time_elapsed = format_elapsed_time(time.perf_counter() - start_time) - params = vertex._built_object_repr() - valid = True - - logger.debug(f"Building node {str(vertex.vertex_type)}") - logger.debug(f"Output: {params[:100]}{'...' if len(params) > 100 else ''}") - if vertex.artifacts: - # The artifacts will be prompt variables - # passed to build_input_keys_response - # to set the input_keys values - artifacts.update(vertex.artifacts) - except Exception as exc: - logger.exception(exc) - params = str(exc) - valid = False - time_elapsed = format_elapsed_time(time.perf_counter() - start_time) - update_build_status(cache_service, flow_id, BuildStatus.FAILURE) - - vertex_id = vertex.parent_node_id if vertex.parent_is_top_level else vertex.id - if vertex_id in graph.top_level_vertices: - response = { - "valid": valid, - "params": params, - "id": vertex_id, - "progress": round(i / number_of_nodes, 2), - "duration": time_elapsed, - } - - yield str(StreamData(event="message", data=response)) - - langchain_object = await graph.build() - # Now we need to check the input_keys to send them to the client - if hasattr(langchain_object, "input_keys"): - input_keys_response = build_input_keys_response(langchain_object, artifacts) - else: - 
input_keys_response = { - "input_keys": None, - "memory_keys": [], - "handle_keys": [], - } - yield str(StreamData(event="message", data=input_keys_response)) - chat_service.set_cache(flow_id, langchain_object) - # We need to reset the chat history - chat_service.chat_history.empty_history(flow_id) - update_build_status(cache_service, flow_id, BuildStatus.SUCCESS) - except Exception as exc: - logger.exception(exc) - logger.error("Error while building the flow: %s", exc) - - update_build_status(cache_service, flow_id, BuildStatus.FAILURE) - yield str(StreamData(event="error", data={"error": str(exc)})) - finally: - yield str(StreamData(event="message", data=final_response)) - - try: - return StreamingResponse(event_stream(flow_id), media_type="text/event-stream") - except Exception as exc: - logger.error(f"Error streaming build: {exc}") - raise HTTPException(status_code=500, detail=str(exc)) - - -async def try_running_celery_task(vertex, user_id): - # Try running the task in celery - # and set the task_id to the local vertex - # if it fails, run the task locally - try: - from langflow.worker import build_vertex - - task = build_vertex.delay(vertex) - vertex.task_id = task.id - except Exception as exc: - logger.debug(f"Error running task in celery: {exc}") - vertex.task_id = None - await vertex.build(user_id=user_id) - return vertex diff --git a/src/backend/langflow/api/v1/credential.py b/src/backend/langflow/api/v1/credential.py deleted file mode 100644 index 1026319c4..000000000 --- a/src/backend/langflow/api/v1/credential.py +++ /dev/null @@ -1,86 +0,0 @@ -from datetime import datetime -from uuid import UUID - -from fastapi import APIRouter, Depends, HTTPException -from langflow.services.auth import utils as auth_utils -from langflow.services.auth.utils import get_current_active_user -from langflow.services.database.models.credential import Credential, CredentialCreate, CredentialRead, CredentialUpdate -from langflow.services.database.models.user.model import User 
-from langflow.services.deps import get_session, get_settings_service -from sqlmodel import Session, select - -router = APIRouter(prefix="/credentials", tags=["Credentials"]) - - -@router.post("/", response_model=CredentialRead, status_code=201) -def create_credential( - *, - session: Session = Depends(get_session), - credential: CredentialCreate, - current_user: User = Depends(get_current_active_user), - settings_service=Depends(get_settings_service), -): - """Create a new credential.""" - try: - # check if credential name already exists - credential_exists = session.exec( - select(Credential).where(Credential.name == credential.name, Credential.user_id == current_user.id) - ).first() - if credential_exists: - raise HTTPException(status_code=400, detail="Credential name already exists") - - db_credential = Credential.model_validate(credential, from_attributes=True) - if not db_credential.value: - raise HTTPException(status_code=400, detail="Credential value cannot be empty") - encrypted = auth_utils.encrypt_api_key(db_credential.value, settings_service=settings_service) - db_credential.value = encrypted - db_credential.user_id = current_user.id - session.add(db_credential) - session.commit() - session.refresh(db_credential) - return db_credential - except Exception as e: - if isinstance(e, HTTPException): - raise e - raise HTTPException(status_code=500, detail=str(e)) from e - - -@router.get("/", response_model=list[CredentialRead], status_code=200) -def read_credentials( - *, - session: Session = Depends(get_session), - current_user: User = Depends(get_current_active_user), -): - """Read all credentials.""" - try: - credentials = session.exec(select(Credential).where(Credential.user_id == current_user.id)).all() - return credentials - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) from e - - -@router.patch("/{credential_id}", response_model=CredentialRead, status_code=200) -def update_credential( - *, - session: Session = 
Depends(get_session), - credential_id: UUID, - credential: CredentialUpdate, - current_user: User = Depends(get_current_active_user), -): - """Update a credential.""" - try: - db_credential = session.exec( - select(Credential).where(Credential.id == credential_id, Credential.user_id == current_user.id) - ).first() - if not db_credential: - raise HTTPException(status_code=404, detail="Credential not found") - - credential_data = credential.model_dump(exclude_unset=True) - for key, value in credential_data.items(): - setattr(db_credential, key, value) - db_credential.updated_at = datetime.utcnow() - session.commit() - session.refresh(db_credential) - return db_credential - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) from e diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py deleted file mode 100644 index c8ef86906..000000000 --- a/src/backend/langflow/api/v1/endpoints.py +++ /dev/null @@ -1,394 +0,0 @@ -from http import HTTPStatus -from typing import Annotated, Any, List, Optional, Union - -import sqlalchemy as sa -from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status -from langflow.api.utils import update_frontend_node_with_template_values -from langflow.api.v1.schemas import ( - CustomComponentCode, - PreloadResponse, - ProcessResponse, - TaskResponse, - TaskStatusResponse, - UploadFileResponse, -) -from langflow.interface.custom.custom_component import CustomComponent -from langflow.interface.custom.directory_reader import DirectoryReader -from langflow.interface.custom.utils import build_custom_component_template -from langflow.processing.process import build_graph_and_generate_result, process_graph_cached, process_tweaks -from langflow.services.auth.utils import api_key_security, get_current_active_user -from langflow.services.cache.utils import save_uploaded_file -from langflow.services.database.models.flow import Flow -from 
langflow.services.database.models.user.model import User -from langflow.services.deps import get_session, get_session_service, get_settings_service, get_task_service -from langflow.services.session.service import SessionService -from loguru import logger -from sqlmodel import select - -try: - from langflow.worker import process_graph_cached_task -except ImportError: - - def process_graph_cached_task(*args, **kwargs): - raise NotImplementedError("Celery is not installed") - - -from langflow.services.task.service import TaskService -from sqlmodel import Session - -# build router -router = APIRouter(tags=["Base"]) - - -async def process_graph_data( - graph_data: dict, - inputs: Optional[Union[List[dict], dict]] = None, - tweaks: Optional[dict] = None, - clear_cache: bool = False, - session_id: Optional[str] = None, - task_service: "TaskService" = Depends(get_task_service), - sync: bool = True, -): - task_result: Any = None - task_status = None - if tweaks: - try: - graph_data = process_tweaks(graph_data, tweaks) - except Exception as exc: - logger.error(f"Error processing tweaks: {exc}") - if sync: - result = await process_graph_cached( - graph_data, - inputs, - clear_cache, - session_id, - ) - task_id = str(id(result)) - if isinstance(result, dict) and "result" in result: - task_result = result["result"] - session_id = result["session_id"] - elif hasattr(result, "result") and hasattr(result, "session_id"): - task_result = result.result - - session_id = result.session_id - else: - task_result = result - else: - logger.warning( - "This is an experimental feature and may not work as expected." - "Please report any issues to our GitHub repository." 
- ) - if session_id is None: - # Generate a session ID - session_id = get_session_service().generate_key(session_id=session_id, data_graph=graph_data) - task_id, task = await task_service.launch_task( - process_graph_cached_task if task_service.use_celery else process_graph_cached, - graph_data, - inputs, - clear_cache, - session_id, - ) - task_status = task.status - if task.status == "FAILURE": - logger.error(f"Task {task_id} failed: {task.traceback}") - task_result = str(task._exception) - else: - task_result = task.result - - if task_id: - task_response = TaskResponse(id=task_id, href=f"api/v1/task/{task_id}") - else: - task_response = None - - return ProcessResponse( - result=task_result, - status=task_status, - task=task_response, - session_id=session_id, - backend=task_service.backend_name, - ) - - -@router.get("/all", dependencies=[Depends(get_current_active_user)]) -def get_all( - settings_service=Depends(get_settings_service), -): - from langflow.interface.types import get_all_types_dict - - logger.debug("Building langchain types dict") - try: - return get_all_types_dict(settings_service) - except Exception as exc: - raise HTTPException(status_code=500, detail=str(exc)) from exc - - -@router.post("/process/json", response_model=ProcessResponse) -async def process_json( - session: Annotated[Session, Depends(get_session)], - data: dict, - inputs: Optional[dict] = None, - tweaks: Optional[dict] = None, - clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821 - session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 - task_service: "TaskService" = Depends(get_task_service), - sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821 -): - try: - return await process_graph_data( - graph_data=data, - inputs=inputs, - tweaks=tweaks, - clear_cache=clear_cache, - session_id=session_id, - task_service=task_service, - sync=sync, - ) - except Exception as exc: - logger.exception(exc) - raise HTTPException(status_code=500, 
detail=str(exc)) from exc - - -# Endpoint to preload a graph -@router.post("/process/preload/{flow_id}", response_model=PreloadResponse) -async def preload_flow( - session: Annotated[Session, Depends(get_session)], - flow_id: str, - session_id: Optional[str] = None, - session_service: SessionService = Depends(get_session_service), - api_key_user: User = Depends(api_key_security), - clear_session: Annotated[bool, Body(embed=True)] = False, # noqa: F821 -): - try: - # Get the flow that matches the flow_id and belongs to the user - # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() - if clear_session: - session_service.clear_session(session_id) - # Check if the session exists - session_data = await session_service.load_session(session_id) - # Session data is a tuple of (graph, artifacts) - # or (None, None) if the session is empty - if isinstance(session_data, tuple): - graph, artifacts = session_data - is_clear = graph is None and artifacts is None - else: - is_clear = session_data is None - return PreloadResponse(session_id=session_id, is_clear=is_clear) - else: - if session_id is None: - session_id = flow_id - flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() - if flow is None: - raise ValueError(f"Flow {flow_id} not found") - - if flow.data is None: - raise ValueError(f"Flow {flow_id} has no data") - graph_data = flow.data - session_service.clear_session(session_id) - # Load the graph using SessionService - session_data = await session_service.load_session(session_id, graph_data) - graph, artifacts = session_data if session_data else (None, None) - if not graph: - raise ValueError("Graph not found in the session") - _ = await graph.build() - session_service.update_session(session_id, (graph, artifacts)) - return PreloadResponse(session_id=session_id) - except Exception as exc: - logger.exception(exc) - raise HTTPException(status_code=500, detail=str(exc)) 
from exc - - -@router.post( - "/predict/{flow_id}", - response_model=ProcessResponse, - dependencies=[Depends(api_key_security)], -) -@router.post( - "/process/{flow_id}", - response_model=ProcessResponse, -) -async def process( - session: Annotated[Session, Depends(get_session)], - flow_id: str, - inputs: Optional[Union[List[dict], dict]] = None, - tweaks: Optional[dict] = None, - clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821 - session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 - task_service: "TaskService" = Depends(get_task_service), - api_key_user: User = Depends(api_key_security), - sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821 - session_service: SessionService = Depends(get_session_service), -): - """ - Endpoint to process an input with a given flow_id. - """ - - try: - if session_id: - session_data = await session_service.load_session(session_id) - graph, artifacts = session_data if session_data else (None, None) - task_result: Any = None - task_status = None - task_id = None - if not graph: - raise ValueError("Graph not found in the session") - result = await build_graph_and_generate_result( - graph=graph, - inputs=inputs, - artifacts=artifacts, - session_id=session_id, - session_service=session_service, - ) - task_id = str(id(result)) - if isinstance(result, dict) and "result" in result: - task_result = result["result"] - session_id = result["session_id"] - elif hasattr(result, "result") and hasattr(result, "session_id"): - task_result = result.result - - session_id = result.session_id - else: - task_result = result - if task_id: - task_response = TaskResponse(id=task_id, href=f"api/v1/task/{task_id}") - else: - task_response = None - return ProcessResponse( - result=task_result, - status=task_status, - task=task_response, - session_id=session_id, - backend=task_service.backend_name, - ) - - else: - if api_key_user is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - 
detail="Invalid API Key", - ) - - # Get the flow that matches the flow_id and belongs to the user - # flow = session.query(Flow).filter(Flow.id == flow_id).filter(Flow.user_id == api_key_user.id).first() - flow = session.exec(select(Flow).where(Flow.id == flow_id).where(Flow.user_id == api_key_user.id)).first() - if flow is None: - raise ValueError(f"Flow {flow_id} not found") - - if flow.data is None: - raise ValueError(f"Flow {flow_id} has no data") - graph_data = flow.data - return await process_graph_data( - graph_data=graph_data, - inputs=inputs, - tweaks=tweaks, - clear_cache=clear_cache, - session_id=session_id, - task_service=task_service, - sync=sync, - ) - except sa.exc.StatementError as exc: - # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') - if "badly formed hexadecimal UUID string" in str(exc): - # This means the Flow ID is not a valid UUID which means it can't find the flow - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc - except ValueError as exc: - if f"Flow {flow_id} not found" in str(exc): - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc - else: - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc - except Exception as e: - # Log stack trace - logger.exception(e) - raise HTTPException(status_code=500, detail=str(e)) from e - - -@router.get("/task/{task_id}", response_model=TaskStatusResponse) -async def get_task_status(task_id: str): - task_service = get_task_service() - task = task_service.get_task(task_id) - result = None - if task is None: - raise HTTPException(status_code=404, detail="Task not found") - if task.ready(): - result = task.result - # If result isinstance of Exception, can we get the traceback? 
- if isinstance(result, Exception): - logger.exception(task.traceback) - - if isinstance(result, dict) and "result" in result: - result = result["result"] - elif hasattr(result, "result"): - result = result.result - - if task.status == "FAILURE": - result = str(task.result) - logger.error(f"Task {task_id} failed: {task.traceback}") - - return TaskStatusResponse(status=task.status, result=result) - - -@router.post( - "/upload/{flow_id}", - response_model=UploadFileResponse, - status_code=HTTPStatus.CREATED, -) -async def create_upload_file(file: UploadFile, flow_id: str): - # Cache file - try: - file_path = save_uploaded_file(file, folder_name=flow_id) - - return UploadFileResponse( - flowId=flow_id, - file_path=file_path, - ) - except Exception as exc: - logger.error(f"Error saving file: {exc}") - raise HTTPException(status_code=500, detail=str(exc)) from exc - - -# get endpoint to return version of langflow -@router.get("/version") -def get_version(): - from langflow import __version__ - - return {"version": __version__} - - -@router.post("/custom_component", status_code=HTTPStatus.OK) -async def custom_component( - raw_code: CustomComponentCode, - user: User = Depends(get_current_active_user), -): - component = CustomComponent(code=raw_code.code) - - built_frontend_node = build_custom_component_template(component, user_id=user.id) - - built_frontend_node = update_frontend_node_with_template_values(built_frontend_node, raw_code.frontend_node) - return built_frontend_node - - -@router.post("/custom_component/reload", status_code=HTTPStatus.OK) -async def reload_custom_component(path: str, user: User = Depends(get_current_active_user)): - from langflow.interface.custom.utils import build_custom_component_template - - try: - reader = DirectoryReader("") - valid, content = reader.process_file(path) - if not valid: - raise ValueError(content) - - extractor = CustomComponent(code=content) - return build_custom_component_template(extractor, user_id=user.id) - except 
Exception as exc: - raise HTTPException(status_code=400, detail=str(exc)) - - -@router.post("/custom_component/update", status_code=HTTPStatus.OK) -async def custom_component_update( - raw_code: CustomComponentCode, - user: User = Depends(get_current_active_user), -): - component = CustomComponent(code=raw_code.code) - - component_node = build_custom_component_template(component, user_id=user.id, update_field=raw_code.field) - # Update the field - return component_node diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py deleted file mode 100644 index b28cc8c2c..000000000 --- a/src/backend/langflow/api/v1/schemas.py +++ /dev/null @@ -1,214 +0,0 @@ -from enum import Enum -from pathlib import Path -from typing import Any, Dict, List, Optional, Union -from uuid import UUID - -from langflow.services.database.models.api_key.model import ApiKeyRead -from langflow.services.database.models.base import orjson_dumps -from langflow.services.database.models.flow import FlowCreate, FlowRead -from langflow.services.database.models.user import UserRead -from pydantic import BaseModel, Field, field_validator - - -class BuildStatus(Enum): - """Status of the build.""" - - SUCCESS = "success" - FAILURE = "failure" - STARTED = "started" - IN_PROGRESS = "in_progress" - - -class GraphData(BaseModel): - """Data inside the exported flow.""" - - nodes: List[Dict[str, Any]] - edges: List[Dict[str, Any]] - - -class ExportedFlow(BaseModel): - """Exported flow from Langflow.""" - - description: str - name: str - id: str - data: GraphData - - -class InputRequest(BaseModel): - input: dict - - -class TweaksRequest(BaseModel): - tweaks: Optional[Dict[str, Dict[str, str]]] = Field(default_factory=dict) - - -class UpdateTemplateRequest(BaseModel): - template: dict - - -class TaskResponse(BaseModel): - """Task response schema.""" - - id: Optional[str] = Field(None) - href: Optional[str] = Field(None) - - -class ProcessResponse(BaseModel): - """Process response 
schema.""" - - result: Any - status: Optional[str] = None - task: Optional[TaskResponse] = None - session_id: Optional[str] = None - backend: Optional[str] = None - - -class PreloadResponse(BaseModel): - """Preload response schema.""" - - session_id: Optional[str] = None - is_clear: Optional[bool] = None - - -# TaskStatusResponse( -# status=task.status, result=task.result if task.ready() else None -# ) -class TaskStatusResponse(BaseModel): - """Task status response schema.""" - - status: str - result: Optional[Any] = None - - -class ChatMessage(BaseModel): - """Chat message schema.""" - - is_bot: bool = False - message: Union[str, None, dict] = None - chatKey: Optional[str] = None - type: str = "human" - - -class ChatResponse(ChatMessage): - """Chat response schema.""" - - intermediate_steps: str - - type: str - is_bot: bool = True - files: list = [] - - @field_validator("type") - @classmethod - def validate_message_type(cls, v): - if v not in ["start", "stream", "end", "error", "info", "file"]: - raise ValueError("type must be start, stream, end, error, info, or file") - return v - - -class PromptResponse(ChatMessage): - """Prompt response schema.""" - - prompt: str - type: str = "prompt" - is_bot: bool = True - - -class FileResponse(ChatMessage): - """File response schema.""" - - data: Any = None - data_type: str - type: str = "file" - is_bot: bool = True - - @field_validator("data_type") - @classmethod - def validate_data_type(cls, v): - if v not in ["image", "csv"]: - raise ValueError("data_type must be image or csv") - return v - - -class FlowListCreate(BaseModel): - flows: List[FlowCreate] - - -class FlowListRead(BaseModel): - flows: List[FlowRead] - - -class InitResponse(BaseModel): - flowId: str - - -class BuiltResponse(BaseModel): - built: bool - - -class UploadFileResponse(BaseModel): - """Upload file response schema.""" - - flowId: str - file_path: Path - - -class StreamData(BaseModel): - event: str - data: dict - - def __str__(self) -> str: - return 
f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n" - - -class CustomComponentCode(BaseModel): - code: str - field: Optional[str] = None - frontend_node: Optional[dict] = None - - -class CustomComponentResponseError(BaseModel): - detail: str - traceback: str - - -class ComponentListCreate(BaseModel): - flows: List[FlowCreate] - - -class ComponentListRead(BaseModel): - flows: List[FlowRead] - - -class UsersResponse(BaseModel): - total_count: int - users: List[UserRead] - - -class ApiKeyResponse(BaseModel): - id: str - api_key: str - name: str - created_at: str - last_used_at: str - - -class ApiKeysResponse(BaseModel): - total_count: int - user_id: UUID - api_keys: List[ApiKeyRead] - - -class CreateApiKeyRequest(BaseModel): - name: str - - -class Token(BaseModel): - access_token: str - refresh_token: str - token_type: str - - -class ApiKeyCreateRequest(BaseModel): - api_key: str diff --git a/src/backend/langflow/api/v1/validate.py b/src/backend/langflow/api/v1/validate.py deleted file mode 100644 index 51d0f0d76..000000000 --- a/src/backend/langflow/api/v1/validate.py +++ /dev/null @@ -1,120 +0,0 @@ -from fastapi import APIRouter, HTTPException -from langflow.api.v1.base import ( - Code, - CodeValidationResponse, - PromptValidationResponse, - ValidatePromptRequest, - validate_prompt, -) -from langflow.template.field.base import TemplateField -from langflow.utils.validate import validate_code -from loguru import logger - -# build router -router = APIRouter(prefix="/validate", tags=["Validate"]) - - -@router.post("/code", status_code=200, response_model=CodeValidationResponse) -def post_validate_code(code: Code): - try: - errors = validate_code(code.code) - return CodeValidationResponse( - imports=errors.get("imports", {}), - function=errors.get("function", {}), - ) - except Exception as e: - return HTTPException(status_code=500, detail=str(e)) - - -@router.post("/prompt", status_code=200, response_model=PromptValidationResponse) -def 
post_validate_prompt(prompt_request: ValidatePromptRequest): - try: - input_variables = validate_prompt(prompt_request.template) - # Check if frontend_node is None before proceeding to avoid attempting to update a non-existent node. - if prompt_request.frontend_node is None: - return PromptValidationResponse( - input_variables=input_variables, - frontend_node=None, - ) - old_custom_fields = get_old_custom_fields(prompt_request) - - add_new_variables_to_template(input_variables, prompt_request) - - remove_old_variables_from_template(old_custom_fields, input_variables, prompt_request) - - update_input_variables_field(input_variables, prompt_request) - - return PromptValidationResponse( - input_variables=input_variables, - frontend_node=prompt_request.frontend_node, - ) - except Exception as e: - logger.exception(e) - raise HTTPException(status_code=500, detail=str(e)) from e - - -def get_old_custom_fields(prompt_request): - try: - if len(prompt_request.frontend_node.custom_fields) == 1 and prompt_request.name == "": - # If there is only one custom field and the name is empty string - # then we are dealing with the first prompt request after the node was created - prompt_request.name = list(prompt_request.frontend_node.custom_fields.keys())[0] - - old_custom_fields = prompt_request.frontend_node.custom_fields[prompt_request.name] - if old_custom_fields is None: - old_custom_fields = [] - - old_custom_fields = old_custom_fields.copy() - except KeyError: - old_custom_fields = [] - prompt_request.frontend_node.custom_fields[prompt_request.name] = [] - return old_custom_fields - - -def add_new_variables_to_template(input_variables, prompt_request): - for variable in input_variables: - try: - template_field = TemplateField( - name=variable, - display_name=variable, - field_type="str", - show=True, - advanced=False, - multiline=True, - input_types=["Document", "BaseOutputParser"], - value="", # Set the value to empty string - ) - if variable in 
prompt_request.frontend_node.template: - # Set the new field with the old value - template_field.value = prompt_request.frontend_node.template[variable]["value"] - - prompt_request.frontend_node.template[variable] = template_field.to_dict() - - # Check if variable is not already in the list before appending - if variable not in prompt_request.frontend_node.custom_fields[prompt_request.name]: - prompt_request.frontend_node.custom_fields[prompt_request.name].append(variable) - - except Exception as exc: - logger.exception(exc) - raise HTTPException(status_code=500, detail=str(exc)) from exc - - -def remove_old_variables_from_template(old_custom_fields, input_variables, prompt_request): - for variable in old_custom_fields: - if variable not in input_variables: - try: - # Remove the variable from custom_fields associated with the given name - if variable in prompt_request.frontend_node.custom_fields[prompt_request.name]: - prompt_request.frontend_node.custom_fields[prompt_request.name].remove(variable) - - # Remove the variable from the template - prompt_request.frontend_node.template.pop(variable, None) - - except Exception as exc: - logger.exception(exc) - raise HTTPException(status_code=500, detail=str(exc)) from exc - - -def update_input_variables_field(input_variables, prompt_request): - if "input_variables" in prompt_request.frontend_node.template: - prompt_request.frontend_node.template["input_variables"]["value"] = input_variables diff --git a/src/backend/langflow/components/agents/CSVAgent.py b/src/backend/langflow/components/agents/CSVAgent.py deleted file mode 100644 index b54e5d90d..000000000 --- a/src/backend/langflow/components/agents/CSVAgent.py +++ /dev/null @@ -1,23 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, AgentExecutor -from langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent - - -class CSVAgentComponent(CustomComponent): - display_name = "CSVAgent" - description = 
"Construct a CSV agent from a CSV and tools." - documentation = "https://python.langchain.com/docs/modules/agents/toolkits/csv" - - def build_config(self): - return { - "llm": {"display_name": "LLM", "type": BaseLanguageModel}, - "path": {"display_name": "Path", "field_type": "file", "suffixes": [".csv"], "file_types": [".csv"]}, - } - - def build( - self, - llm: BaseLanguageModel, - path: str, - ) -> AgentExecutor: - # Instantiate and return the CSV agent class with the provided llm and path - return create_csv_agent(llm=llm, path=path) diff --git a/src/backend/langflow/components/chains/ConversationChain.py b/src/backend/langflow/components/chains/ConversationChain.py deleted file mode 100644 index 76e6d8e25..000000000 --- a/src/backend/langflow/components/chains/ConversationChain.py +++ /dev/null @@ -1,29 +0,0 @@ -from langflow import CustomComponent -from langchain.chains import ConversationChain -from typing import Optional, Union, Callable -from langflow.field_typing import BaseLanguageModel, BaseMemory, Chain - - -class ConversationChainComponent(CustomComponent): - display_name = "ConversationChain" - description = "Chain to have a conversation and load context from memory." - - def build_config(self): - return { - "prompt": {"display_name": "Prompt"}, - "llm": {"display_name": "LLM"}, - "memory": { - "display_name": "Memory", - "info": "Memory to load context from. 
If none is provided, a ConversationBufferMemory will be used.", - }, - "code": {"show": False}, - } - - def build( - self, - llm: BaseLanguageModel, - memory: Optional[BaseMemory] = None, - ) -> Union[Chain, Callable]: - if memory is None: - return ConversationChain(llm=llm) - return ConversationChain(llm=llm, memory=memory) diff --git a/src/backend/langflow/components/chains/LLMCheckerChain.py b/src/backend/langflow/components/chains/LLMCheckerChain.py deleted file mode 100644 index 527cafbb7..000000000 --- a/src/backend/langflow/components/chains/LLMCheckerChain.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Callable, Union - -from langchain.chains import LLMCheckerChain -from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, Chain - - -class LLMCheckerChainComponent(CustomComponent): - display_name = "LLMCheckerChain" - description = "" - documentation = "https://python.langchain.com/docs/modules/chains/additional/llm_checker" - - def build_config(self): - return { - "llm": {"display_name": "LLM"}, - } - - def build( - self, - llm: BaseLanguageModel, - ) -> Union[Chain, Callable]: - return LLMCheckerChain.from_llm(llm=llm) diff --git a/src/backend/langflow/components/chains/PromptRunner.py b/src/backend/langflow/components/chains/PromptRunner.py deleted file mode 100644 index 16a3ce429..000000000 --- a/src/backend/langflow/components/chains/PromptRunner.py +++ /dev/null @@ -1,28 +0,0 @@ -from langchain.llms.base import BaseLLM -from langchain.prompts import PromptTemplate -from langchain_core.documents import Document - -from langflow import CustomComponent - - -class PromptRunner(CustomComponent): - display_name: str = "Prompt Runner" - description: str = "Run a Chain with the given PromptTemplate" - beta: bool = True - field_config = { - "llm": {"display_name": "LLM"}, - "prompt": { - "display_name": "Prompt Template", - "info": "Make sure the prompt has all variables filled.", - }, - "code": {"show": False}, - } - - 
def build(self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}) -> Document: - chain = prompt | llm - # The input is an empty dict because the prompt is already filled - result = chain.invoke(input=inputs) - if hasattr(result, "content"): - result = result.content - self.repr_value = result - return Document(page_content=str(result)) diff --git a/src/backend/langflow/components/chains/RetrievalQA.py b/src/backend/langflow/components/chains/RetrievalQA.py deleted file mode 100644 index 5f1232443..000000000 --- a/src/backend/langflow/components/chains/RetrievalQA.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Callable, Optional, Union - -from langchain.chains.combine_documents.base import BaseCombineDocumentsChain -from langchain.chains.retrieval_qa.base import BaseRetrievalQA, RetrievalQA -from langflow import CustomComponent -from langflow.field_typing import BaseMemory, BaseRetriever - - -class RetrievalQAComponent(CustomComponent): - display_name = "RetrievalQA" - description = "Chain for question-answering against an index." 
- - def build_config(self): - return { - "combine_documents_chain": {"display_name": "Combine Documents Chain"}, - "retriever": {"display_name": "Retriever"}, - "memory": {"display_name": "Memory", "required": False}, - "input_key": {"display_name": "Input Key", "advanced": True}, - "output_key": {"display_name": "Output Key", "advanced": True}, - "return_source_documents": {"display_name": "Return Source Documents"}, - } - - def build( - self, - combine_documents_chain: BaseCombineDocumentsChain, - retriever: BaseRetriever, - memory: Optional[BaseMemory] = None, - input_key: str = "query", - output_key: str = "result", - return_source_documents: bool = True, - ) -> Union[BaseRetrievalQA, Callable]: - return RetrievalQA( - combine_documents_chain=combine_documents_chain, - retriever=retriever, - memory=memory, - input_key=input_key, - output_key=output_key, - return_source_documents=return_source_documents, - ) diff --git a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py b/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py deleted file mode 100644 index 3c46cd8bd..000000000 --- a/src/backend/langflow/components/chains/RetrievalQAWithSourcesChain.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Optional - -from langchain.chains import RetrievalQAWithSourcesChain -from langchain.chains.qa_with_sources.base import BaseQAWithSourcesChain -from langchain.chains.combine_documents.base import BaseCombineDocumentsChain - -from langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever - - -class RetrievalQAWithSourcesChainComponent(CustomComponent): - display_name = "RetrievalQAWithSourcesChain" - description = "Question-answering with sources over an index." 
- - def build_config(self): - return { - "llm": {"display_name": "LLM"}, - "chain_type": { - "display_name": "Chain Type", - "options": ["stuff", "map_reduce", "map_rerank", "refine"], - }, - "memory": {"display_name": "Memory"}, - "return_source_documents": {"display_name": "Return Source Documents"}, - } - - def build( - self, - retriever: BaseRetriever, - llm: BaseLanguageModel, - combine_documents_chain: BaseCombineDocumentsChain, - chain_type: str, - memory: Optional[BaseMemory] = None, - return_source_documents: Optional[bool] = True, - ) -> BaseQAWithSourcesChain: - return RetrievalQAWithSourcesChain.from_chain_type( - llm=llm, - chain_type=chain_type, - combine_documents_chain=combine_documents_chain, - memory=memory, - return_source_documents=return_source_documents, - retriever=retriever, - ) diff --git a/src/backend/langflow/components/chains/SQLDatabaseChain.py b/src/backend/langflow/components/chains/SQLDatabaseChain.py deleted file mode 100644 index 56bd433ba..000000000 --- a/src/backend/langflow/components/chains/SQLDatabaseChain.py +++ /dev/null @@ -1,25 +0,0 @@ -from langflow import CustomComponent -from typing import Callable, Union -from langflow.field_typing import BasePromptTemplate, BaseLanguageModel, Chain -from langchain_community.utilities.sql_database import SQLDatabase -from langchain_experimental.sql.base import SQLDatabaseChain - - -class SQLDatabaseChainComponent(CustomComponent): - display_name = "SQLDatabaseChain" - description = "" - - def build_config(self): - return { - "db": {"display_name": "Database"}, - "llm": {"display_name": "LLM"}, - "prompt": {"display_name": "Prompt"}, - } - - def build( - self, - db: SQLDatabase, - llm: BaseLanguageModel, - prompt: BasePromptTemplate, - ) -> Union[Chain, Callable, SQLDatabaseChain]: - return SQLDatabaseChain.from_llm(llm=llm, db=db, prompt=prompt) diff --git a/src/backend/langflow/components/custom_components/CustomComponent.py 
b/src/backend/langflow/components/custom_components/CustomComponent.py deleted file mode 100644 index 533ccb727..000000000 --- a/src/backend/langflow/components/custom_components/CustomComponent.py +++ /dev/null @@ -1,12 +0,0 @@ -from langflow import CustomComponent -from langflow.field_typing import Data - - -class Component(CustomComponent): - documentation: str = "http://docs.langflow.org/components/custom" - - def build_config(self): - return {"param": {"display_name": "Parameter"}} - - def build(self, param: Data) -> Data: - return param diff --git a/src/backend/langflow/components/documentloaders/DirectoryLoader.py b/src/backend/langflow/components/documentloaders/DirectoryLoader.py deleted file mode 100644 index 5b0875126..000000000 --- a/src/backend/langflow/components/documentloaders/DirectoryLoader.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import Any, Dict, List - -from langchain.docstore.document import Document -from langchain.document_loaders.directory import DirectoryLoader -from langflow import CustomComponent - - -class DirectoryLoaderComponent(CustomComponent): - display_name = "DirectoryLoader" - description = "Load from a directory." 
- - def build_config(self) -> Dict[str, Any]: - return { - "glob": {"display_name": "Glob Pattern", "value": "**/*.txt"}, - "load_hidden": {"display_name": "Load Hidden Files", "value": False, "advanced": True}, - "max_concurrency": {"display_name": "Max Concurrency", "value": 10, "advanced": True}, - "metadata": {"display_name": "Metadata", "value": {}}, - "path": {"display_name": "Local Directory"}, - "recursive": {"display_name": "Recursive", "value": True, "advanced": True}, - "silent_errors": {"display_name": "Silent Errors", "value": False, "advanced": True}, - "use_multithreading": {"display_name": "Use Multithreading", "value": True, "advanced": True}, - } - - def build( - self, - glob: str, - path: str, - max_concurrency: int = 2, - load_hidden: bool = False, - recursive: bool = True, - silent_errors: bool = False, - use_multithreading: bool = True, - ) -> List[Document]: - return DirectoryLoader( - glob=glob, - path=path, - load_hidden=load_hidden, - max_concurrency=max_concurrency, - recursive=recursive, - silent_errors=silent_errors, - use_multithreading=use_multithreading, - ).load() diff --git a/src/backend/langflow/components/documentloaders/FileLoader.py b/src/backend/langflow/components/documentloaders/FileLoader.py deleted file mode 100644 index 07160557d..000000000 --- a/src/backend/langflow/components/documentloaders/FileLoader.py +++ /dev/null @@ -1,113 +0,0 @@ -from langchain_core.documents import Document -from langflow import CustomComponent -from langflow.utils.constants import LOADERS_INFO - - -class FileLoaderComponent(CustomComponent): - display_name: str = "File Loader" - description: str = "Generic File Loader" - beta = True - - def build_config(self): - loader_options = ["Automatic"] + [loader_info["name"] for loader_info in LOADERS_INFO] - - file_types = [] - suffixes = [] - - for loader_info in LOADERS_INFO: - if "allowedTypes" in loader_info: - file_types.extend(loader_info["allowedTypes"]) - suffixes.extend([f".{ext}" for ext in 
loader_info["allowedTypes"]]) - - return { - "file_path": { - "display_name": "File Path", - "required": True, - "field_type": "file", - "file_types": [ - "json", - "txt", - "csv", - "jsonl", - "html", - "htm", - "conllu", - "enex", - "msg", - "pdf", - "srt", - "eml", - "md", - "pptx", - "docx", - ], - "suffixes": [ - ".json", - ".txt", - ".csv", - ".jsonl", - ".html", - ".htm", - ".conllu", - ".enex", - ".msg", - ".pdf", - ".srt", - ".eml", - ".md", - ".pptx", - ".docx", - ], - # "file_types" : file_types, - # "suffixes": suffixes, - }, - "loader": { - "display_name": "Loader", - "is_list": True, - "required": True, - "options": loader_options, - "value": "Automatic", - }, - "code": {"show": False}, - } - - def build(self, file_path: str, loader: str) -> Document: - file_type = file_path.split(".")[-1] - - # Mapeie o nome do loader selecionado para suas informações - selected_loader_info = None - for loader_info in LOADERS_INFO: - if loader_info["name"] == loader: - selected_loader_info = loader_info - break - - if selected_loader_info is None and loader != "Automatic": - raise ValueError(f"Loader {loader} not found in the loader info list") - - if loader == "Automatic": - # Determine o loader automaticamente com base na extensão do arquivo - default_loader_info = None - for info in LOADERS_INFO: - if "defaultFor" in info and file_type in info["defaultFor"]: - default_loader_info = info - break - - if default_loader_info is None: - raise ValueError(f"No default loader found for file type: {file_type}") - - selected_loader_info = default_loader_info - if isinstance(selected_loader_info, dict): - loader_import: str = selected_loader_info["import"] - else: - raise ValueError(f"Loader info for {loader} is not a dict\nLoader info:\n{selected_loader_info}") - module_name, class_name = loader_import.rsplit(".", 1) - - try: - # Importe o loader dinamicamente - loader_module = __import__(module_name, fromlist=[class_name]) - loader_instance = getattr(loader_module, 
class_name) - except ImportError as e: - raise ValueError(f"Loader {loader} could not be imported\nLoader info:\n{selected_loader_info}") from e - - result = loader_instance(file_path=file_path) - return result.load() diff --git a/src/backend/langflow/components/documentloaders/UrlLoader.py b/src/backend/langflow/components/documentloaders/UrlLoader.py deleted file mode 100644 index eb60ac572..000000000 --- a/src/backend/langflow/components/documentloaders/UrlLoader.py +++ /dev/null @@ -1,47 +0,0 @@ -from typing import List - -from langchain import document_loaders -from langchain_core.documents import Document - -from langflow import CustomComponent - - -class UrlLoaderComponent(CustomComponent): - display_name: str = "Url Loader" - description: str = "Generic Url Loader Component" - - def build_config(self): - return { - "web_path": { - "display_name": "Url", - "required": True, - }, - "loader": { - "display_name": "Loader", - "is_list": True, - "required": True, - "options": [ - "AZLyricsLoader", - "CollegeConfidentialLoader", - "GitbookLoader", - "HNLoader", - "IFixitLoader", - "IMSDbLoader", - "WebBaseLoader", - ], - "value": "WebBaseLoader", - }, - "code": {"show": False}, - } - - def build(self, web_path: str, loader: str) -> List[Document]: - try: - loader_instance = getattr(document_loaders, loader)(web_path=web_path) - except Exception as e: - raise ValueError(f"No loader found for: {web_path}") from e - docs = loader_instance.load() - avg_length = sum(len(doc.page_content) for doc in docs if hasattr(doc, "page_content")) / len(docs) - self.status = f"""{len(docs)} documents) - \nAvg. 
Document Length (characters): {int(avg_length)} - Documents: {docs[:3]}...""" - return docs diff --git a/src/backend/langflow/components/llms/CTransformers.py b/src/backend/langflow/components/llms/CTransformers.py deleted file mode 100644 index a0668814e..000000000 --- a/src/backend/langflow/components/llms/CTransformers.py +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Dict, Optional - -from langchain_community.llms.ctransformers import CTransformers - -from langflow import CustomComponent - - -class CTransformersComponent(CustomComponent): - display_name = "CTransformers" - description = "C Transformers LLM models" - documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers" - - def build_config(self): - return { - "model": {"display_name": "Model", "required": True}, - "model_file": { - "display_name": "Model File", - "required": False, - "field_type": "file", - "file_types": [".bin"], - }, - "model_type": {"display_name": "Model Type", "required": True}, - "config": { - "display_name": "Config", - "advanced": True, - "required": False, - "field_type": "dict", - "value": '{"top_k":40,"top_p":0.95,"temperature":0.8,"repetition_penalty":1.1,"last_n_tokens":64,"seed":-1,"max_new_tokens":256,"stop":"","stream":"False","reset":"True","batch_size":8,"threads":-1,"context_length":-1,"gpu_layers":0}', - }, - } - - def build(self, model: str, model_file: str, model_type: str, config: Optional[Dict] = None) -> CTransformers: - return CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) # type: ignore diff --git a/src/backend/langflow/components/llms/GoogleGenerativeAI.py b/src/backend/langflow/components/llms/GoogleGenerativeAI.py deleted file mode 100644 index 2fa9002bb..000000000 --- a/src/backend/langflow/components/llms/GoogleGenerativeAI.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import Optional - -from langchain_google_genai import ChatGoogleGenerativeAI # type: ignore -from 
langflow import CustomComponent -from langflow.field_typing import BaseLanguageModel, RangeSpec, TemplateField -from pydantic.v1.types import SecretStr - - -class GoogleGenerativeAIComponent(CustomComponent): - display_name: str = "Google Generative AI" - description: str = "A component that uses Google Generative AI to generate text." - documentation: str = "http://docs.langflow.org/components/custom" - - def build_config(self): - return { - "google_api_key": TemplateField( - display_name="Google API Key", - info="The Google API Key to use for the Google Generative AI.", - ), - "max_output_tokens": TemplateField( - display_name="Max Output Tokens", - info="The maximum number of tokens to generate.", - ), - "temperature": TemplateField( - display_name="Temperature", - info="Run inference with this temperature. Must by in the closed interval [0.0, 1.0].", - ), - "top_k": TemplateField( - display_name="Top K", - info="Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.", - range_spec=RangeSpec(min=0, max=2, step=0.1), - advanced=True, - ), - "top_p": TemplateField( - display_name="Top P", - info="The maximum cumulative probability of tokens to consider when sampling.", - advanced=True, - ), - "n": TemplateField( - display_name="N", - info="Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.", - advanced=True, - ), - "model": TemplateField( - display_name="Model", - info="The name of the model to use. 
Supported examples: gemini-pro", - options=["gemini-pro", "gemini-pro-vision"], - ), - "code": TemplateField( - advanced=True, - ), - } - - def build( - self, - google_api_key: str, - model: str, - max_output_tokens: Optional[int] = None, - temperature: float = 0.1, - top_k: Optional[int] = None, - top_p: Optional[float] = None, - n: Optional[int] = 1, - ) -> BaseLanguageModel: - return ChatGoogleGenerativeAI( - model=model, - max_output_tokens=max_output_tokens or None, # type: ignore - temperature=temperature, - top_k=top_k or None, - top_p=top_p or None, # type: ignore - n=n or 1, - google_api_key=SecretStr(google_api_key), - ) diff --git a/src/backend/langflow/components/llms/LlamaCpp.py b/src/backend/langflow/components/llms/LlamaCpp.py deleted file mode 100644 index 831665bfa..000000000 --- a/src/backend/langflow/components/llms/LlamaCpp.py +++ /dev/null @@ -1,129 +0,0 @@ -from typing import Optional, List, Dict, Any -from langflow import CustomComponent -from langchain_community.llms.llamacpp import LlamaCpp - - -class LlamaCppComponent(CustomComponent): - display_name = "LlamaCpp" - description = "llama.cpp model." 
- documentation = "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp" - - def build_config(self): - return { - "grammar": {"display_name": "Grammar", "advanced": True}, - "cache": {"display_name": "Cache", "advanced": True}, - "client": {"display_name": "Client", "advanced": True}, - "echo": {"display_name": "Echo", "advanced": True}, - "f16_kv": {"display_name": "F16 KV", "advanced": True}, - "grammar_path": {"display_name": "Grammar Path", "advanced": True}, - "last_n_tokens_size": {"display_name": "Last N Tokens Size", "advanced": True}, - "logits_all": {"display_name": "Logits All", "advanced": True}, - "logprobs": {"display_name": "Logprobs", "advanced": True}, - "lora_base": {"display_name": "Lora Base", "advanced": True}, - "lora_path": {"display_name": "Lora Path", "advanced": True}, - "max_tokens": {"display_name": "Max Tokens", "advanced": True}, - "metadata": {"display_name": "Metadata", "advanced": True}, - "model_kwargs": {"display_name": "Model Kwargs", "advanced": True}, - "model_path": { - "display_name": "Model Path", - "field_type": "file", - "file_types": [".bin"], - "required": True, - }, - "n_batch": {"display_name": "N Batch", "advanced": True}, - "n_ctx": {"display_name": "N Ctx", "advanced": True}, - "n_gpu_layers": {"display_name": "N GPU Layers", "advanced": True}, - "n_parts": {"display_name": "N Parts", "advanced": True}, - "n_threads": {"display_name": "N Threads", "advanced": True}, - "repeat_penalty": {"display_name": "Repeat Penalty", "advanced": True}, - "rope_freq_base": {"display_name": "Rope Freq Base", "advanced": True}, - "rope_freq_scale": {"display_name": "Rope Freq Scale", "advanced": True}, - "seed": {"display_name": "Seed", "advanced": True}, - "stop": {"display_name": "Stop", "advanced": True}, - "streaming": {"display_name": "Streaming", "advanced": True}, - "suffix": {"display_name": "Suffix", "advanced": True}, - "tags": {"display_name": "Tags", "advanced": True}, - "temperature": 
{"display_name": "Temperature"}, - "top_k": {"display_name": "Top K", "advanced": True}, - "top_p": {"display_name": "Top P", "advanced": True}, - "use_mlock": {"display_name": "Use Mlock", "advanced": True}, - "use_mmap": {"display_name": "Use Mmap", "advanced": True}, - "verbose": {"display_name": "Verbose", "advanced": True}, - "vocab_only": {"display_name": "Vocab Only", "advanced": True}, - } - - def build( - self, - model_path: str, - grammar: Optional[str] = None, - cache: Optional[bool] = None, - client: Optional[Any] = None, - echo: Optional[bool] = False, - f16_kv: bool = True, - grammar_path: Optional[str] = None, - last_n_tokens_size: Optional[int] = 64, - logits_all: bool = False, - logprobs: Optional[int] = None, - lora_base: Optional[str] = None, - lora_path: Optional[str] = None, - max_tokens: Optional[int] = 256, - metadata: Optional[Dict] = None, - model_kwargs: Dict = {}, - n_batch: Optional[int] = 8, - n_ctx: int = 512, - n_gpu_layers: Optional[int] = 1, - n_parts: int = -1, - n_threads: Optional[int] = 1, - repeat_penalty: Optional[float] = 1.1, - rope_freq_base: float = 10000.0, - rope_freq_scale: float = 1.0, - seed: int = -1, - stop: Optional[List[str]] = [], - streaming: bool = True, - suffix: Optional[str] = "", - tags: Optional[List[str]] = [], - temperature: Optional[float] = 0.8, - top_k: Optional[int] = 40, - top_p: Optional[float] = 0.95, - use_mlock: bool = False, - use_mmap: Optional[bool] = True, - verbose: bool = True, - vocab_only: bool = False, - ) -> LlamaCpp: - return LlamaCpp( - model_path=model_path, - grammar=grammar, - cache=cache, - client=client, - echo=echo, - f16_kv=f16_kv, - grammar_path=grammar_path, - last_n_tokens_size=last_n_tokens_size, - logits_all=logits_all, - logprobs=logprobs, - lora_base=lora_base, - lora_path=lora_path, - max_tokens=max_tokens, - metadata=metadata, - model_kwargs=model_kwargs, - n_batch=n_batch, - n_ctx=n_ctx, - n_gpu_layers=n_gpu_layers, - n_parts=n_parts, - n_threads=n_threads, - 
repeat_penalty=repeat_penalty, - rope_freq_base=rope_freq_base, - rope_freq_scale=rope_freq_scale, - seed=seed, - stop=stop, - streaming=streaming, - suffix=suffix, - tags=tags, - temperature=temperature, - top_k=top_k, - top_p=top_p, - use_mlock=use_mlock, - use_mmap=use_mmap, - verbose=verbose, - vocab_only=vocab_only, - ) diff --git a/src/backend/langflow/components/utilities/GetRequest.py b/src/backend/langflow/components/utilities/GetRequest.py deleted file mode 100644 index b143a8e7e..000000000 --- a/src/backend/langflow/components/utilities/GetRequest.py +++ /dev/null @@ -1,74 +0,0 @@ -from typing import Optional - -import requests -from langchain_core.documents import Document -from langflow import CustomComponent -from langflow.services.database.models.base import orjson_dumps - - -class GetRequest(CustomComponent): - display_name: str = "GET Request" - description: str = "Make a GET request to the given URL." - output_types: list[str] = ["Document"] - documentation: str = "https://docs.langflow.org/components/utilities#get-request" - beta: bool = True - field_config = { - "url": { - "display_name": "URL", - "info": "The URL to make the request to", - "is_list": True, - }, - "headers": { - "display_name": "Headers", - "info": "The headers to send with the request.", - }, - "code": {"show": False}, - "timeout": { - "display_name": "Timeout", - "field_type": "int", - "info": "The timeout to use for the request.", - "value": 5, - }, - } - - def get_document(self, session: requests.Session, url: str, headers: Optional[dict], timeout: int) -> Document: - try: - response = session.get(url, headers=headers, timeout=int(timeout)) - try: - response_json = response.json() - result = orjson_dumps(response_json, indent_2=False) - except Exception: - result = response.text - self.repr_value = result - return Document( - page_content=result, - metadata={ - "source": url, - "headers": headers, - "status_code": response.status_code, - }, - ) - except requests.Timeout: - 
return Document( - page_content="Request Timed Out", - metadata={"source": url, "headers": headers, "status_code": 408}, - ) - except Exception as exc: - return Document( - page_content=str(exc), - metadata={"source": url, "headers": headers, "status_code": 500}, - ) - - def build( - self, - url: str, - headers: Optional[dict] = None, - timeout: int = 5, - ) -> list[Document]: - if headers is None: - headers = {} - urls = url if isinstance(url, list) else [url] - with requests.Session() as session: - documents = [self.get_document(session, u, headers, timeout) for u in urls] - self.repr_value = documents - return documents diff --git a/src/backend/langflow/components/utilities/PostRequest.py b/src/backend/langflow/components/utilities/PostRequest.py deleted file mode 100644 index baf734f54..000000000 --- a/src/backend/langflow/components/utilities/PostRequest.py +++ /dev/null @@ -1,77 +0,0 @@ -from typing import Optional - -import requests -from langchain_core.documents import Document -from langflow import CustomComponent -from langflow.services.database.models.base import orjson_dumps - - -class PostRequest(CustomComponent): - display_name: str = "POST Request" - description: str = "Make a POST request to the given URL." 
- output_types: list[str] = ["Document"] - documentation: str = "https://docs.langflow.org/components/utilities#post-request" - beta: bool = True - field_config = { - "url": {"display_name": "URL", "info": "The URL to make the request to."}, - "headers": { - "display_name": "Headers", - "info": "The headers to send with the request.", - }, - "code": {"show": False}, - "document": {"display_name": "Document"}, - } - - def post_document( - self, - session: requests.Session, - document: Document, - url: str, - headers: Optional[dict] = None, - ) -> Document: - try: - response = session.post(url, headers=headers, data=document.page_content) - try: - response_json = response.json() - result = orjson_dumps(response_json, indent_2=False) - except Exception: - result = response.text - self.repr_value = result - return Document( - page_content=result, - metadata={ - "source": url, - "headers": headers, - "status_code": response, - }, - ) - except Exception as exc: - return Document( - page_content=str(exc), - metadata={ - "source": url, - "headers": headers, - "status_code": 500, - }, - ) - - def build( - self, - document: Document, - url: str, - headers: Optional[dict] = None, - ) -> list[Document]: - if headers is None: - headers = {} - - if not isinstance(document, list) and isinstance(document, Document): - documents: list[Document] = [document] - elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document): - documents = document - else: - raise ValueError("document must be a Document or a list of Documents") - - with requests.Session() as session: - documents = [self.post_document(session, doc, url, headers) for doc in documents] - self.repr_value = documents - return documents diff --git a/src/backend/langflow/components/utilities/UpdateRequest.py b/src/backend/langflow/components/utilities/UpdateRequest.py deleted file mode 100644 index bd40f642d..000000000 --- a/src/backend/langflow/components/utilities/UpdateRequest.py +++ /dev/null @@ 
-1,88 +0,0 @@ -from typing import List, Optional - -import requests -from langchain_core.documents import Document -from langflow import CustomComponent -from langflow.services.database.models.base import orjson_dumps - - -class UpdateRequest(CustomComponent): - display_name: str = "Update Request" - description: str = "Make a PATCH request to the given URL." - output_types: list[str] = ["Document"] - documentation: str = "https://docs.langflow.org/components/utilities#update-request" - beta: bool = True - field_config = { - "url": {"display_name": "URL", "info": "The URL to make the request to."}, - "headers": { - "display_name": "Headers", - "field_type": "NestedDict", - "info": "The headers to send with the request.", - }, - "code": {"show": False}, - "document": {"display_name": "Document"}, - "method": { - "display_name": "Method", - "field_type": "str", - "info": "The HTTP method to use.", - "options": ["PATCH", "PUT"], - "value": "PATCH", - }, - } - - def update_document( - self, - session: requests.Session, - document: Document, - url: str, - headers: Optional[dict] = None, - method: str = "PATCH", - ) -> Document: - try: - if method == "PATCH": - response = session.patch(url, headers=headers, data=document.page_content) - elif method == "PUT": - response = session.put(url, headers=headers, data=document.page_content) - else: - raise ValueError(f"Unsupported method: {method}") - try: - response_json = response.json() - result = orjson_dumps(response_json, indent_2=False) - except Exception: - result = response.text - self.repr_value = result - return Document( - page_content=result, - metadata={ - "source": url, - "headers": headers, - "status_code": response.status_code, - }, - ) - except Exception as exc: - return Document( - page_content=str(exc), - metadata={"source": url, "headers": headers, "status_code": 500}, - ) - - def build( - self, - method: str, - document: Document, - url: str, - headers: Optional[dict] = None, - ) -> List[Document]: - if 
headers is None: - headers = {} - - if not isinstance(document, list) and isinstance(document, Document): - documents: list[Document] = [document] - elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document): - documents = document - else: - raise ValueError("document must be a Document or a list of Documents") - - with requests.Session() as session: - documents = [self.update_document(session, doc, url, headers, method) for doc in documents] - self.repr_value = documents - return documents diff --git a/src/backend/langflow/components/vectorstores/FAISS.py b/src/backend/langflow/components/vectorstores/FAISS.py deleted file mode 100644 index dec14f6db..000000000 --- a/src/backend/langflow/components/vectorstores/FAISS.py +++ /dev/null @@ -1,26 +0,0 @@ -from typing import List, Union - -from langchain.schema import BaseRetriever -from langchain_community.vectorstores import VectorStore -from langchain_community.vectorstores.faiss import FAISS -from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings - - -class FAISSComponent(CustomComponent): - display_name = "FAISS" - description = "Construct FAISS wrapper from raw documents." 
- documentation = "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss" - - def build_config(self): - return { - "documents": {"display_name": "Documents"}, - "embedding": {"display_name": "Embedding"}, - } - - def build( - self, - embedding: Embeddings, - documents: List[Document], - ) -> Union[VectorStore, FAISS, BaseRetriever]: - return FAISS.from_documents(documents=documents, embedding=embedding) diff --git a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py b/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py deleted file mode 100644 index 13b62b708..000000000 --- a/src/backend/langflow/components/vectorstores/MongoDBAtlasVectorSearch.py +++ /dev/null @@ -1,48 +0,0 @@ -from typing import List, Optional - -from langchain_community.vectorstores.mongodb_atlas import MongoDBAtlasVectorSearch - -from langflow import CustomComponent -from langflow.field_typing import Document, Embeddings, NestedDict - - -class MongoDBAtlasComponent(CustomComponent): - display_name = "MongoDB Atlas" - description = "a `MongoDB Atlas Vector Search` vector store from raw documents." 
- - def build_config(self): - return { - "documents": {"display_name": "Documents", "is_list": True}, - "embedding": {"display_name": "Embedding"}, - "collection_name": {"display_name": "Collection Name"}, - "db_name": {"display_name": "Database Name"}, - "index_name": {"display_name": "Index Name"}, - "mongodb_atlas_cluster_uri": {"display_name": "MongoDB Atlas Cluster URI"}, - "search_kwargs": {"display_name": "Search Kwargs", "advanced": True}, - } - - def build( - self, - embedding: Embeddings, - documents: Optional[List[Document]] = None, - collection_name: str = "", - db_name: str = "", - index_name: str = "", - mongodb_atlas_cluster_uri: str = "", - search_kwargs: Optional[NestedDict] = None, - ) -> MongoDBAtlasVectorSearch: - search_kwargs = search_kwargs or {} - vector_store = MongoDBAtlasVectorSearch.from_connection_string( - connection_string=mongodb_atlas_cluster_uri, - namespace=f"{db_name}.{collection_name}", - embedding=embedding, - index_name=index_name, - ) - - if documents is not None: - if len(documents) == 0: - raise ValueError("If documents are provided, there must be at least one document.") - - vector_store.add_documents(documents) - - return vector_store diff --git a/src/backend/langflow/field_typing/range_spec.py b/src/backend/langflow/field_typing/range_spec.py deleted file mode 100644 index 036d21fa9..000000000 --- a/src/backend/langflow/field_typing/range_spec.py +++ /dev/null @@ -1,21 +0,0 @@ -from pydantic import BaseModel, field_validator - - -class RangeSpec(BaseModel): - min: float = -1.0 - max: float = 1.0 - step: float = 0.1 - - @field_validator("max") - @classmethod - def max_must_be_greater_than_min(cls, v, values, **kwargs): - if "min" in values.data and v <= values.data["min"]: - raise ValueError("max must be greater than min") - return v - - @field_validator("step") - @classmethod - def step_must_be_positive(cls, v): - if v <= 0: - raise ValueError("step must be positive") - return v diff --git 
a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py deleted file mode 100644 index d1629d49b..000000000 --- a/src/backend/langflow/graph/graph/base.py +++ /dev/null @@ -1,255 +0,0 @@ -from typing import Dict, Generator, List, Type, Union - -from langchain.chains.base import Chain -from loguru import logger - -from langflow.graph.edge.base import Edge -from langflow.graph.graph.constants import lazy_load_vertex_dict -from langflow.graph.graph.utils import process_flow -from langflow.graph.vertex.base import Vertex -from langflow.graph.vertex.types import FileToolVertex, LLMVertex, ToolkitVertex -from langflow.interface.tools.constants import FILE_TOOLS -from langflow.utils import payload - - -class Graph: - """A class representing a graph of vertices and edges.""" - - def __init__( - self, - nodes: List[Dict], - edges: List[Dict[str, str]], - ) -> None: - self._vertices = nodes - self._edges = edges - self.raw_graph_data = {"nodes": nodes, "edges": edges} - - self.top_level_vertices = [] - for vertex in self._vertices: - if vertex_id := vertex.get("id"): - self.top_level_vertices.append(vertex_id) - self._graph_data = process_flow(self.raw_graph_data) - - self._vertices = self._graph_data["nodes"] - self._edges = self._graph_data["edges"] - self._build_graph() - - def __getstate__(self): - return self.raw_graph_data - - def __setstate__(self, state): - self.__init__(**state) - - @classmethod - def from_payload(cls, payload: Dict) -> "Graph": - """ - Creates a graph from a payload. - - Args: - payload (Dict): The payload to create the graph from.˜` - - Returns: - Graph: The created graph. - """ - if "data" in payload: - payload = payload["data"] - try: - vertices = payload["nodes"] - edges = payload["edges"] - return cls(vertices, edges) - except KeyError as exc: - logger.exception(exc) - raise ValueError( - f"Invalid payload. Expected keys 'nodes' and 'edges'. 
Found {list(payload.keys())}" - ) from exc - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Graph): - return False - return self.__repr__() == other.__repr__() - - def _build_graph(self) -> None: - """Builds the graph from the vertices and edges.""" - self.vertices = self._build_vertices() - self.vertex_map = {vertex.id: vertex for vertex in self.vertices} - self.edges = self._build_edges() - - # This is a hack to make sure that the LLM vertex is sent to - # the toolkit vertex - self._build_vertex_params() - # remove invalid vertices - self._validate_vertices() - - def _build_vertex_params(self) -> None: - """Identifies and handles the LLM vertex within the graph.""" - llm_vertex = None - for vertex in self.vertices: - vertex._build_params() - if isinstance(vertex, LLMVertex): - llm_vertex = vertex - - if llm_vertex: - for vertex in self.vertices: - if isinstance(vertex, ToolkitVertex): - vertex.params["llm"] = llm_vertex - - def _validate_vertices(self) -> None: - """Check that all vertices have edges""" - if len(self.vertices) == 1: - return - for vertex in self.vertices: - if not self._validate_vertex(vertex): - raise ValueError(f"{vertex.vertex_type} is not connected to any other components") - - def _validate_vertex(self, vertex: Vertex) -> bool: - """Validates a vertex.""" - # All vertices that do not have edges are invalid - return len(self.get_vertex_edges(vertex.id)) > 0 - - def get_vertex(self, vertex_id: str) -> Union[None, Vertex]: - """Returns a vertex by id.""" - return self.vertex_map.get(vertex_id) - - def get_vertex_edges(self, vertex_id: str) -> List[Edge]: - """Returns a list of edges for a given vertex.""" - return [edge for edge in self.edges if edge.source_id == vertex_id or edge.target_id == vertex_id] - - def get_vertices_with_target(self, vertex_id: str) -> List[Vertex]: - """Returns the vertices connected to a vertex.""" - vertices: List[Vertex] = [] - for edge in self.edges: - if edge.target_id == vertex_id: - 
vertex = self.get_vertex(edge.source_id) - if vertex is None: - continue - vertices.append(vertex) - return vertices - - async def build(self) -> Chain: - """Builds the graph.""" - # Get root vertex - root_vertex = payload.get_root_vertex(self) - if root_vertex is None: - raise ValueError("No root vertex found") - return await root_vertex.build() - - def topological_sort(self) -> List[Vertex]: - """ - Performs a topological sort of the vertices in the graph. - - Returns: - List[Vertex]: A list of vertices in topological order. - - Raises: - ValueError: If the graph contains a cycle. - """ - # States: 0 = unvisited, 1 = visiting, 2 = visited - state = {vertex: 0 for vertex in self.vertices} - sorted_vertices = [] - - def dfs(vertex): - if state[vertex] == 1: - # We have a cycle - raise ValueError("Graph contains a cycle, cannot perform topological sort") - if state[vertex] == 0: - state[vertex] = 1 - for edge in vertex.edges: - if edge.source_id == vertex.id: - dfs(self.get_vertex(edge.target_id)) - state[vertex] = 2 - sorted_vertices.append(vertex) - - # Visit each vertex - for vertex in self.vertices: - if state[vertex] == 0: - dfs(vertex) - - return list(reversed(sorted_vertices)) - - def generator_build(self) -> Generator[Vertex, None, None]: - """Builds each vertex in the graph and yields it.""" - sorted_vertices = self.topological_sort() - logger.debug("There are %s vertices in the graph", len(sorted_vertices)) - yield from sorted_vertices - - def get_vertex_neighbors(self, vertex: Vertex) -> Dict[Vertex, int]: - """Returns the neighbors of a vertex.""" - neighbors: Dict[Vertex, int] = {} - for edge in self.edges: - if edge.source_id == vertex.id: - neighbor = self.get_vertex(edge.target_id) - if neighbor is None: - continue - if neighbor not in neighbors: - neighbors[neighbor] = 0 - neighbors[neighbor] += 1 - elif edge.target_id == vertex.id: - neighbor = self.get_vertex(edge.source_id) - if neighbor is None: - continue - if neighbor not in neighbors: - 
neighbors[neighbor] = 0 - neighbors[neighbor] += 1 - return neighbors - - def _build_edges(self) -> List[Edge]: - """Builds the edges of the graph.""" - # Edge takes two vertices as arguments, so we need to build the vertices first - # and then build the edges - # if we can't find a vertex, we raise an error - - edges: List[Edge] = [] - for edge in self._edges: - source = self.get_vertex(edge["source"]) - target = self.get_vertex(edge["target"]) - if source is None: - raise ValueError(f"Source vertex {edge['source']} not found") - if target is None: - raise ValueError(f"Target vertex {edge['target']} not found") - edges.append(Edge(source, target, edge)) - return edges - - def _get_vertex_class(self, vertex_type: str, vertex_base_type: str) -> Type[Vertex]: - """Returns the vertex class based on the vertex type.""" - if vertex_type in FILE_TOOLS: - return FileToolVertex - if vertex_base_type == "CustomComponent": - return lazy_load_vertex_dict.get_custom_component_vertex_type() - - if vertex_base_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: - return lazy_load_vertex_dict.VERTEX_TYPE_MAP[vertex_base_type] - return ( - lazy_load_vertex_dict.VERTEX_TYPE_MAP[vertex_type] - if vertex_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP - else Vertex - ) - - def _build_vertices(self) -> List[Vertex]: - """Builds the vertices of the graph.""" - vertices: List[Vertex] = [] - for vertex in self._vertices: - vertex_data = vertex["data"] - vertex_type: str = vertex_data["type"] # type: ignore - vertex_base_type: str = vertex_data["node"]["template"]["_type"] # type: ignore - - VertexClass = self._get_vertex_class(vertex_type, vertex_base_type) - vertex_instance = VertexClass(vertex, graph=self) - vertex_instance.set_top_level(self.top_level_vertices) - vertices.append(vertex_instance) - - return vertices - - def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> List[Vertex]: - """Returns the children of a vertex based on the vertex type.""" - children = [] - 
vertex_types = [vertex.data["type"]] - if "node" in vertex.data: - vertex_types += vertex.data["node"]["base_classes"] - if vertex_type in vertex_types: - children.append(vertex) - return children - - def __repr__(self): - vertex_ids = [vertex.id for vertex in self.vertices] - edges_repr = "\n".join([f"{edge.source_id} --> {edge.target_id}" for edge in self.edges]) - return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}" diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py deleted file mode 100644 index bd38186e1..000000000 --- a/src/backend/langflow/graph/vertex/base.py +++ /dev/null @@ -1,378 +0,0 @@ -import ast -import inspect -import types -from typing import TYPE_CHECKING, Any, Coroutine, Dict, List, Optional - -from langflow.graph.utils import UnbuiltObject -from langflow.interface.initialize import loading -from langflow.interface.listing import lazy_load_dict -from langflow.utils.constants import DIRECT_TYPES -from langflow.utils.util import sync_to_async -from loguru import logger - -if TYPE_CHECKING: - from langflow.graph.edge.base import Edge - from langflow.graph.graph.base import Graph - - -class Vertex: - def __init__( - self, - data: Dict, - graph: "Graph", - base_type: Optional[str] = None, - is_task: bool = False, - params: Optional[Dict] = None, - ) -> None: - self.graph = graph - self.id: str = data["id"] - self._data = data - self.base_type: Optional[str] = base_type - self._parse_data() - self._built_object = UnbuiltObject() - self._built = False - self.artifacts: Dict[str, Any] = {} - self.task_id: Optional[str] = None - self.is_task = is_task - self.params = params or {} - self.parent_node_id: Optional[str] = self._data.get("parent_node_id") - self.parent_is_top_level = False - - @property - def edges(self) -> List["Edge"]: - return self.graph.get_vertex_edges(self.id) - - def __getstate__(self): - return { - "_data": self._data, - "params": {}, - "base_type": self.base_type, - 
"is_task": self.is_task, - "id": self.id, - "_built_object": UnbuiltObject(), - "_built": False, - "parent_node_id": self.parent_node_id, - "parent_is_top_level": self.parent_is_top_level, - } - - def __setstate__(self, state): - self._data = state["_data"] - self.params = state["params"] - self.base_type = state["base_type"] - self.is_task = state["is_task"] - self.id = state["id"] - self._parse_data() - if "_built_object" in state: - self._built_object = state["_built_object"] - self._built = state["_built"] - else: - self._built_object = UnbuiltObject() - self._built = False - self.artifacts: Dict[str, Any] = {} - self.task_id: Optional[str] = None - self.parent_node_id = state["parent_node_id"] - self.parent_is_top_level = state["parent_is_top_level"] - - def set_top_level(self, top_level_vertices: List[str]) -> None: - self.parent_is_top_level = self.parent_node_id in top_level_vertices - - def _parse_data(self) -> None: - self.data = self._data["data"] - self.output = self.data["node"]["base_classes"] - template_dicts = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)} - - self.required_inputs = [ - template_dicts[key]["type"] for key, value in template_dicts.items() if value["required"] - ] - self.optional_inputs = [ - template_dicts[key]["type"] for key, value in template_dicts.items() if not value["required"] - ] - # Add the template_dicts[key]["input_types"] to the optional_inputs - self.optional_inputs.extend( - [input_type for value in template_dicts.values() for input_type in value.get("input_types", [])] - ) - - template_dict = self.data["node"]["template"] - self.vertex_type = ( - self.data["type"] - if "Tool" not in self.output or template_dict["_type"].islower() - else template_dict["_type"] - ) - - if self.base_type is None: - for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items(): - if self.vertex_type in value: - self.base_type = base_type - break - - def get_task(self): - # using the task_id, 
get the task from celery - # and return it - from celery.result import AsyncResult # type: ignore - - return AsyncResult(self.task_id) - - def _build_params(self): - # sourcery skip: merge-list-append, remove-redundant-if - # Some params are required, some are optional - # but most importantly, some params are python base classes - # like str and others are LangChain objects like LLMChain, BasePromptTemplate - # so we need to be able to distinguish between the two - - # The dicts with "type" == "str" are the ones that are python base classes - # and most likely have a "value" key - - # So for each key besides "_type" in the template dict, we have a dict - # with a "type" key. If the type is not "str", then we need to get the - # edge that connects to that node and get the Node with the required data - # and use that as the value for the param - # If the type is "str", then we need to get the value of the "value" key - # and use that as the value for the param - - if self.graph is None: - raise ValueError("Graph not found") - - template_dict = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)} - params = self.params.copy() if self.params else {} - - for edge in self.edges: - if not hasattr(edge, "target_param"): - continue - param_key = edge.target_param - - # If the param_key is in the template_dict and the edge.target_id is the current node - # We check this to make sure params with the same name but different target_id - # don't get overwritten - if param_key in template_dict and edge.target_id == self.id: - if template_dict[param_key]["list"]: - if param_key not in params: - params[param_key] = [] - params[param_key].append(self.graph.get_vertex(edge.source_id)) - elif edge.target_id == self.id: - params[param_key] = self.graph.get_vertex(edge.source_id) - - for key, value in template_dict.items(): - if key in params: - continue - # Skip _type and any value that has show == False and is not code - # If we don't want 
to show code but we want to use it - if key == "_type" or (not value.get("show") and key != "code"): - continue - # If the type is not transformable to a python base class - # then we need to get the edge that connects to this node - if value.get("type") == "file": - # Load the type in value.get('fileTypes') using - # what is inside value.get('content') - # value.get('value') is the file name - if file_path := value.get("file_path"): - params[key] = file_path - else: - raise ValueError(f"File path not found for {self.vertex_type}") - elif value.get("type") in DIRECT_TYPES and params.get(key) is None: - val = value.get("value") - if value.get("type") == "code": - try: - params[key] = ast.literal_eval(val) if val else None - except Exception as exc: - logger.debug(f"Error parsing code: {exc}") - params[key] = val - elif value.get("type") in ["dict", "NestedDict"]: - # When dict comes from the frontend it comes as a - # list of dicts, so we need to convert it to a dict - # before passing it to the build method - if isinstance(val, list): - params[key] = {k: v for item in value.get("value", []) for k, v in item.items()} - elif isinstance(val, dict): - params[key] = val - elif value.get("type") == "int" and val is not None: - try: - params[key] = int(val) - except ValueError: - params[key] = val - elif value.get("type") == "float" and val is not None: - try: - params[key] = float(val) - except ValueError: - params[key] = val - elif val is not None and val != "": - params[key] = val - - if not value.get("required") and params.get(key) is None: - if value.get("default"): - params[key] = value.get("default") - else: - params.pop(key, None) - # Add _type to params - self._raw_params = params - self.params = params - - async def _build(self, user_id=None): - """ - Initiate the build process. 
- """ - logger.debug(f"Building {self.vertex_type}") - await self._build_each_node_in_params_dict(user_id) - await self._get_and_instantiate_class(user_id) - self._validate_built_object() - - self._built = True - - async def _build_each_node_in_params_dict(self, user_id=None): - """ - Iterates over each node in the params dictionary and builds it. - """ - for key, value in self.params.copy().items(): - if self._is_node(value): - if value == self: - del self.params[key] - continue - await self._build_node_and_update_params(key, value, user_id) - elif isinstance(value, list) and self._is_list_of_nodes(value): - await self._build_list_of_nodes_and_update_params(key, value, user_id) - - def _is_node(self, value): - """ - Checks if the provided value is an instance of Vertex. - """ - return isinstance(value, Vertex) - - def _is_list_of_nodes(self, value): - """ - Checks if the provided value is a list of Vertex instances. - """ - return all(self._is_node(node) for node in value) - - async def get_result(self, user_id=None, timeout=None) -> Any: - # Check if the Vertex was built already - if self._built: - return self._built_object - - if self.is_task and self.task_id is not None: - task = self.get_task() - - result = task.get(timeout=timeout) - if isinstance(result, Coroutine): - result = await result - if result is not None: # If result is ready - self._update_built_object_and_artifacts(result) - return self._built_object - else: - # Handle the case when the result is not ready (retry, throw exception, etc.) - pass - - # If there's no task_id, build the vertex locally - await self.build(user_id=user_id) - return self._built_object - - async def _build_node_and_update_params(self, key, node, user_id=None): - """ - Builds a given node and updates the params dictionary accordingly. 
- """ - - result = await node.get_result(user_id) - self._handle_func(key, result) - if isinstance(result, list): - self._extend_params_list_with_result(key, result) - self.params[key] = result - - async def _build_list_of_nodes_and_update_params(self, key, nodes: List["Vertex"], user_id=None): - """ - Iterates over a list of nodes, builds each and updates the params dictionary. - """ - self.params[key] = [] - for node in nodes: - built = await node.get_result(user_id) - if isinstance(built, list): - if key not in self.params: - self.params[key] = [] - self.params[key].extend(built) - else: - self.params[key].append(built) - - def _handle_func(self, key, result): - """ - Handles 'func' key by checking if the result is a function and setting it as coroutine. - """ - if key == "func": - if not isinstance(result, types.FunctionType): - if hasattr(result, "run"): - result = result.run # type: ignore - elif hasattr(result, "get_function"): - result = result.get_function() # type: ignore - elif inspect.iscoroutinefunction(result): - self.params["coroutine"] = result - else: - self.params["coroutine"] = sync_to_async(result) - - def _extend_params_list_with_result(self, key, result): - """ - Extends a list in the params dictionary with the given result if it exists. - """ - if isinstance(self.params[key], list): - self.params[key].extend(result) - - async def _get_and_instantiate_class(self, user_id=None): - """ - Gets the class from a dictionary and instantiates it with the params. 
- """ - if self.base_type is None: - raise ValueError(f"Base type for node {self.vertex_type} not found") - try: - result = await loading.instantiate_class( - node_type=self.vertex_type, - base_type=self.base_type, - params=self.params, - user_id=user_id, - ) - self._update_built_object_and_artifacts(result) - except Exception as exc: - logger.exception(exc) - raise ValueError(f"Error building node {self.vertex_type}(ID:{self.id}): {str(exc)}") from exc - - def _update_built_object_and_artifacts(self, result): - """ - Updates the built object and its artifacts. - """ - if isinstance(result, tuple): - self._built_object, self.artifacts = result - else: - self._built_object = result - - def _validate_built_object(self): - """ - Checks if the built object is None and raises a ValueError if so. - """ - if isinstance(self._built_object, UnbuiltObject): - raise ValueError(f"{self.vertex_type}: {self._built_object_repr()}") - elif self._built_object is None: - message = f"{self.vertex_type} returned None." - if self.base_type == "custom_components": - message += " Make sure your build method returns a component." 
- - logger.warning(message) - - async def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: - if not self._built or force: - await self._build(user_id, *args, **kwargs) - - return self._built_object - - def add_edge(self, edge: "Edge") -> None: - if edge not in self.edges: - self.edges.append(edge) - - def __repr__(self) -> str: - return f"Vertex(id={self.id}, data={self.data})" - - def __eq__(self, __o: object) -> bool: - try: - return self.id == __o.id if isinstance(__o, Vertex) else False - except AttributeError: - return False - - def __hash__(self) -> int: - return id(self) - - def _built_object_repr(self): - # Add a message with an emoji, stars for sucess, - return "Built sucessfully ✨" if self._built_object is not None else "Failed to build 😵‍💫" diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py deleted file mode 100644 index 7439876fa..000000000 --- a/src/backend/langflow/graph/vertex/types.py +++ /dev/null @@ -1,302 +0,0 @@ -import ast -from typing import Any, Dict, List, Optional, Union - -from langflow.graph.utils import UnbuiltObject, flatten_list -from langflow.graph.vertex.base import Vertex -from langflow.interface.utils import extract_input_variables_from_prompt - - -class AgentVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="agents", params=params) - - self.tools: List[Union[ToolkitVertex, ToolVertex]] = [] - self.chains: List[ChainVertex] = [] - - def __getstate__(self): - state = super().__getstate__() - state["tools"] = self.tools - state["chains"] = self.chains - return state - - def __setstate__(self, state): - self.tools = state["tools"] - self.chains = state["chains"] - super().__setstate__(state) - - def _set_tools_and_chains(self) -> None: - for edge in self.edges: - if not hasattr(edge, "source_id"): - continue - source_node = self.graph.get_vertex(edge.source_id) - if 
isinstance(source_node, (ToolVertex, ToolkitVertex)): - self.tools.append(source_node) - elif isinstance(source_node, ChainVertex): - self.chains.append(source_node) - - async def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: - if not self._built or force: - self._set_tools_and_chains() - # First, build the tools - for tool_node in self.tools: - await tool_node.build(user_id=user_id) - - # Next, build the chains and the rest - for chain_node in self.chains: - await chain_node.build(tools=self.tools, user_id=user_id) - - await self._build(user_id=user_id) - - return self._built_object - - -class ToolVertex(Vertex): - def __init__( - self, - data: Dict, - graph, - params: Optional[Dict] = None, - ): - super().__init__(data, graph=graph, base_type="tools", params=params) - - -class LLMVertex(Vertex): - built_node_type = None - class_built_object = None - - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="llms", params=params) - - async def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: - # LLM is different because some models might take up too much memory - # or time to load. 
So we only load them when we need them.ß - if self.vertex_type == self.built_node_type: - return self.class_built_object - if not self._built or force: - await self._build(user_id=user_id) - self.built_node_type = self.vertex_type - self.class_built_object = self._built_object - # Avoid deepcopying the LLM - # that are loaded from a file - return self._built_object - - -class ToolkitVertex(Vertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__(data, graph=graph, base_type="toolkits", params=params) - - -class FileToolVertex(ToolVertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__(data, graph=graph, params=params) - - -class WrapperVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="wrappers") - - async def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: - if not self._built or force: - if "headers" in self.params: - self.params["headers"] = ast.literal_eval(self.params["headers"]) - await self._build(user_id=user_id) - return self._built_object - - -class DocumentLoaderVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="documentloaders", params=params) - - def _built_object_repr(self): - # This built_object is a list of documents. Maybe we should - # show how many documents are in the list? - - if self._built_object and not isinstance(self._built_object, UnbuiltObject): - avg_length = sum(len(doc.page_content) for doc in self._built_object if hasattr(doc, "page_content")) / len( - self._built_object - ) - return f"""{self.vertex_type}({len(self._built_object)} documents) - \nAvg. 
Document Length (characters): {int(avg_length)} - Documents: {self._built_object[:3]}...""" - return f"{self.vertex_type}()" - - -class EmbeddingVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="embeddings", params=params) - - -class VectorStoreVertex(Vertex): - def __init__(self, data: Dict, graph, params=None): - super().__init__(data, graph=graph, base_type="vectorstores") - - self.params = params or {} - - # VectorStores may contain databse connections - # so we need to define the __reduce__ method and the __setstate__ method - # to avoid pickling errors - - def remove_docs_and_texts_from_params(self): - # remove documents and texts from params - # so that we don't try to pickle a database connection - self.params.pop("documents", None) - self.params.pop("texts", None) - - # def __getstate__(self): - # # We want to save the params attribute - # # and if "documents" or "texts" are in the params - # # we want to remove them because they have already - # # been processed. - # params = self.params.copy() - # params.pop("documents", None) - # params.pop("texts", None) - - # return super().__getstate__() - - def __setstate__(self, state): - super().__setstate__(state) - self.remove_docs_and_texts_from_params() - - -class MemoryVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="memory") - - -class RetrieverVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="retrievers") - - -class TextSplitterVertex(Vertex): - def __init__(self, data: Dict, graph, params: Optional[Dict] = None): - super().__init__(data, graph=graph, base_type="textsplitters", params=params) - - def _built_object_repr(self): - # This built_object is a list of documents. Maybe we should - # show how many documents are in the list? 
- - if self._built_object and not isinstance(self._built_object, UnbuiltObject): - avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(self._built_object) - return f"""{self.vertex_type}({len(self._built_object)} documents) - \nAvg. Document Length (characters): {int(avg_length)} - \nDocuments: {self._built_object[:3]}...""" - return f"{self.vertex_type}()" - - -class ChainVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="chains") - - async def build( - self, - force: bool = False, - user_id=None, - *args, - **kwargs, - ) -> Any: - if not self._built or force: - # Temporarily remove the code from the params - self.params.pop("code", None) - # Check if the chain requires a PromptVertex - - # Temporarily remove "code" from the params - self.params.pop("code", None) - - for key, value in self.params.items(): - if isinstance(value, PromptVertex): - # Build the PromptVertex, passing the tools if available - tools = kwargs.get("tools", None) - self.params[key] = await value.build(tools=tools, force=force) - - await self._build(user_id=user_id) - - return self._built_object - - -class PromptVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="prompts") - - async def build( - self, - force: bool = False, - user_id=None, - tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None, - *args, - **kwargs, - ) -> Any: - if not self._built or force: - if "input_variables" not in self.params or self.params["input_variables"] is None: - self.params["input_variables"] = [] - # Check if it is a ZeroShotPrompt and needs a tool - if "ShotPrompt" in self.vertex_type: - tools = [await tool_node.build(user_id=user_id) for tool_node in tools] if tools is not None else [] - # flatten the list of tools if it is a list of lists - # first check if it is a list - if tools and isinstance(tools, list) and isinstance(tools[0], list): - tools = 
flatten_list(tools) - self.params["tools"] = tools - prompt_params = [ - key for key, value in self.params.items() if isinstance(value, str) and key != "format_instructions" - ] - else: - prompt_params = ["template"] - - if "prompt" not in self.params and "messages" not in self.params: - for param in prompt_params: - prompt_text = self.params[param] - variables = extract_input_variables_from_prompt(prompt_text) - self.params["input_variables"].extend(variables) - self.params["input_variables"] = list(set(self.params["input_variables"])) - elif isinstance(self.params, dict): - self.params.pop("input_variables", None) - - await self._build(user_id=user_id) - return self._built_object - - def _built_object_repr(self): - if not self.artifacts or self._built_object is None or not hasattr(self._built_object, "format"): - return super()._built_object_repr() - # We'll build the prompt with the artifacts - # to show the user what the prompt looks like - # with the variables filled in - artifacts = self.artifacts.copy() - # Remove the handle_keys from the artifacts - # so the prompt format doesn't break - artifacts.pop("handle_keys", None) - try: - if ( - not hasattr(self._built_object, "template") - and hasattr(self._built_object, "prompt") - and not isinstance(self._built_object, UnbuiltObject) - ): - template = self._built_object.prompt.template - elif not isinstance(self._built_object, UnbuiltObject) and hasattr(self._built_object, "template"): - template = self._built_object.template - for key, value in artifacts.items(): - if value: - replace_key = "{" + key + "}" - template = template.replace(replace_key, value) - return template if isinstance(template, str) else f"{self.vertex_type}({template})" - except KeyError: - return str(self._built_object) - - -class OutputParserVertex(Vertex): - def __init__(self, data: Dict, graph): - super().__init__(data, graph=graph, base_type="output_parsers") - - -class CustomComponentVertex(Vertex): - def __init__(self, data: Dict, 
graph): - super().__init__(data, graph=graph, base_type="custom_components", is_task=False) - - def _built_object_repr(self): - if self.task_id and self.is_task: - if task := self.get_task(): - return str(task.info) - else: - return f"Task {self.task_id} is not running" - if self.artifacts and "repr" in self.artifacts: - return self.artifacts["repr"] or super()._built_object_repr() diff --git a/src/backend/langflow/graph/vertex/utils.py b/src/backend/langflow/graph/vertex/utils.py deleted file mode 100644 index e1d439f4b..000000000 --- a/src/backend/langflow/graph/vertex/utils.py +++ /dev/null @@ -1,5 +0,0 @@ -from langflow.utils.constants import PYTHON_BASIC_TYPES - - -def is_basic_type(obj): - return type(obj) in PYTHON_BASIC_TYPES diff --git a/src/backend/langflow/interface/custom/custom_component/custom_component.py b/src/backend/langflow/interface/custom/custom_component/custom_component.py deleted file mode 100644 index 3cc52438e..000000000 --- a/src/backend/langflow/interface/custom/custom_component/custom_component.py +++ /dev/null @@ -1,238 +0,0 @@ -import operator -from typing import Any, Callable, ClassVar, List, Optional, Union -from uuid import UUID - -import yaml -from cachetools import TTLCache, cachedmethod -from fastapi import HTTPException -from langflow.interface.custom.code_parser.utils import ( - extract_inner_type_from_generic_alias, - extract_union_types_from_generic_alias, -) -from langflow.interface.custom.custom_component.component import Component -from langflow.services.database.models.flow import Flow -from langflow.services.database.utils import session_getter -from langflow.services.deps import get_credential_service, get_db_service -from langflow.utils import validate - - -class CustomComponent(Component): - display_name: Optional[str] = None - """The display name of the component. Defaults to None.""" - description: Optional[str] = None - """The description of the component. 
Defaults to None.""" - icon: Optional[str] = None - """The icon of the component. It should be an emoji. Defaults to None.""" - code: Optional[str] = None - """The code of the component. Defaults to None.""" - field_config: dict = {} - """The field configuration of the component. Defaults to an empty dictionary.""" - field_order: Optional[List[str]] = None - """The field order of the component. Defaults to an empty list.""" - code_class_base_inheritance: ClassVar[str] = "CustomComponent" - function_entrypoint_name: ClassVar[str] = "build" - function: Optional[Callable] = None - repr_value: Optional[Any] = "" - user_id: Optional[Union[UUID, str]] = None - status: Optional[Any] = None - """The status of the component. This is displayed on the frontend. Defaults to None.""" - _tree: Optional[dict] = None - - def __init__(self, **data): - self.cache = TTLCache(maxsize=1024, ttl=60) - super().__init__(**data) - - def _get_field_order(self): - return self.field_order or list(self.field_config.keys()) - - def custom_repr(self): - if self.repr_value == "": - self.repr_value = self.status - if isinstance(self.repr_value, dict): - return yaml.dump(self.repr_value) - if isinstance(self.repr_value, str): - return self.repr_value - return str(self.repr_value) - - def build_config(self): - return self.field_config - - @property - def tree(self): - return self.get_code_tree(self.code or "") - - @property - def get_function_entrypoint_args(self) -> list: - build_method = self.get_build_method() - if not build_method: - return [] - - args = build_method["args"] - for arg in args: - if arg.get("type") == "prompt": - raise HTTPException( - status_code=400, - detail={ - "error": "Type hint Error", - "traceback": ( - "Prompt type is not supported in the build method." " Try using PromptTemplate instead." 
- ), - }, - ) - elif not arg.get("type") and arg.get("name") != "self": - # Set the type to Data - arg["type"] = "Data" - return args - - @cachedmethod(operator.attrgetter("cache")) - def get_build_method(self): - if not self.code: - return {} - - component_classes = [cls for cls in self.tree["classes"] if self.code_class_base_inheritance in cls["bases"]] - if not component_classes: - return {} - - # Assume the first Component class is the one we're interested in - component_class = component_classes[0] - build_methods = [ - method for method in component_class["methods"] if method["name"] == self.function_entrypoint_name - ] - - return build_methods[0] if build_methods else {} - - @property - def get_function_entrypoint_return_type(self) -> List[Any]: - build_method = self.get_build_method() - if not build_method or not build_method.get("has_return"): - return [] - return_type = build_method["return_type"] - - # If list or List is in the return type, then we remove it and return the inner type - if hasattr(return_type, "__origin__") and return_type.__origin__ in [list, List]: - return_type = extract_inner_type_from_generic_alias(return_type) - - # If the return type is not a Union, then we just return it as a list - if not hasattr(return_type, "__origin__") or return_type.__origin__ != Union: - return return_type if isinstance(return_type, list) else [return_type] - # If the return type is a Union, then we need to parse itx - return_type = extract_union_types_from_generic_alias(return_type) - return return_type - - @property - def get_main_class_name(self): - if not self.code: - return "" - - base_name = self.code_class_base_inheritance - method_name = self.function_entrypoint_name - - classes = [] - for item in self.tree.get("classes", []): - if base_name in item["bases"]: - method_names = [method["name"] for method in item["methods"]] - if method_name in method_names: - classes.append(item["name"]) - - # Get just the first item - return next(iter(classes), "") - 
- @property - def template_config(self): - return self.build_template_config() - - def build_template_config(self): - if not self.code: - return {} - - attributes = [ - main_class["attributes"] - for main_class in self.tree.get("classes", []) - if main_class["name"] == self.get_main_class_name - ] - # Get just the first item - attributes = next(iter(attributes), []) - - return super().build_template_config(attributes) - - @property - def keys(self): - def get_credential(name: str): - if hasattr(self, "_user_id") and not self._user_id: - raise ValueError(f"User id is not set for {self.__class__.__name__}") - credential_service = get_credential_service() # Get service instance - # Retrieve and decrypt the credential by name for the current user - db_service = get_db_service() - with session_getter(db_service) as session: - return credential_service.get_credential(user_id=self._user_id or "", name=name, session=session) - - return get_credential - - def list_key_names(self): - if hasattr(self, "_user_id") and not self._user_id: - raise ValueError(f"User id is not set for {self.__class__.__name__}") - credential_service = get_credential_service() - db_service = get_db_service() - with session_getter(db_service) as session: - return credential_service.list_credentials(user_id=self._user_id, session=session) - - def index(self, value: int = 0): - """Returns a function that returns the value at the given index in the iterable.""" - - def get_index(iterable: List[Any]): - return iterable[value] if iterable else iterable - - return get_index - - @property - def get_function(self): - return validate.create_function(self.code, self.function_entrypoint_name) - - async def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any: - from langflow.processing.process import build_sorted_vertices, process_tweaks - - db_service = get_db_service() - with session_getter(db_service) as session: - graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None - if 
not graph_data: - raise ValueError(f"Flow {flow_id} not found") - if tweaks: - graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) - return await build_sorted_vertices(graph_data, self.user_id) - - def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]: - if not self._user_id: - raise ValueError("Session is invalid") - try: - get_session = get_session or session_getter - db_service = get_db_service() - with get_session(db_service) as session: - flows = session.query(Flow).filter(Flow.user_id == self.user_id).all() - return flows - except Exception as e: - raise ValueError("Session is invalid") from e - - async def get_flow( - self, - *, - flow_name: Optional[str] = None, - flow_id: Optional[str] = None, - tweaks: Optional[dict] = None, - get_session: Optional[Callable] = None, - ) -> Flow: - get_session = get_session or session_getter - db_service = get_db_service() - with get_session(db_service) as session: - if flow_id: - flow = session.query(Flow).get(flow_id) - elif flow_name: - flow = (session.query(Flow).filter(Flow.name == flow_name).filter(Flow.user_id == self.user_id)).first() - else: - raise ValueError("Either flow_name or flow_id must be provided") - - if not flow: - raise ValueError(f"Flow {flow_name or flow_id} not found") - return await self.load_flow(flow.id, tweaks) - - def build(self, *args: Any, **kwargs: Any) -> Any: - raise NotImplementedError diff --git a/src/backend/langflow/processing/load.py b/src/backend/langflow/processing/load.py deleted file mode 100644 index d564d101f..000000000 --- a/src/backend/langflow/processing/load.py +++ /dev/null @@ -1,52 +0,0 @@ -import asyncio -import json -from pathlib import Path -from typing import Optional, Union - -from langflow.graph import Graph -from langflow.processing.process import fix_memory_inputs, process_tweaks - - -def load_flow_from_json(flow: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True): - """ - Load flow from a JSON file or a JSON 
object. - - :param flow: JSON file path or JSON object - :param tweaks: Optional tweaks to be processed - :param build: If True, build the graph, otherwise return the graph object - :return: Langchain object or Graph object depending on the build parameter - """ - # If input is a file path, load JSON from the file - if isinstance(flow, (str, Path)): - with open(flow, "r", encoding="utf-8") as f: - flow_graph = json.load(f) - # If input is a dictionary, assume it's a JSON object - elif isinstance(flow, dict): - flow_graph = flow - else: - raise TypeError("Input must be either a file path (str) or a JSON object (dict)") - - graph_data = flow_graph["data"] - if tweaks is not None: - graph_data = process_tweaks(graph_data, tweaks) - nodes = graph_data["nodes"] - edges = graph_data["edges"] - graph = Graph(nodes, edges) - - if build: - langchain_object = asyncio.run(graph.build()) - - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - - if hasattr(langchain_object, "return_intermediate_steps"): - # Deactivating until we have a frontend solution - # to display intermediate steps - langchain_object.return_intermediate_steps = False - - fix_memory_inputs(langchain_object) - return langchain_object - - return graph - - return graph diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py deleted file mode 100644 index 394fb1fde..000000000 --- a/src/backend/langflow/processing/process.py +++ /dev/null @@ -1,299 +0,0 @@ -import asyncio -from typing import Any, Coroutine, Dict, List, Optional, Tuple, Union - -from langchain.agents import AgentExecutor -from langchain.chains.base import Chain -from langchain.schema import AgentAction, Document -from langchain_community.vectorstores import VectorStore -from langchain_core.messages import AIMessage -from langchain_core.runnables.base import Runnable -from loguru import logger -from pydantic import BaseModel - -from langflow.graph.graph.base import Graph -from 
langflow.interface.run import build_sorted_vertices, get_memory_key, update_memory_keys -from langflow.services.deps import get_session_service -from langflow.services.session.service import SessionService - - -def fix_memory_inputs(langchain_object): - """ - Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the - object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the - get_memory_key function and updates the memory keys using the update_memory_keys function. - """ - if not hasattr(langchain_object, "memory") or langchain_object.memory is None: - return - try: - if ( - hasattr(langchain_object.memory, "memory_key") - and langchain_object.memory.memory_key in langchain_object.input_variables - ): - return - except AttributeError: - input_variables = ( - langchain_object.prompt.input_variables - if hasattr(langchain_object, "prompt") - else langchain_object.input_keys - ) - if langchain_object.memory.memory_key in input_variables: - return - - possible_new_mem_key = get_memory_key(langchain_object) - if possible_new_mem_key is not None: - update_memory_keys(langchain_object, possible_new_mem_key) - - -def format_actions(actions: List[Tuple[AgentAction, str]]) -> str: - """Format a list of (AgentAction, answer) tuples into a string.""" - output = [] - for action, answer in actions: - log = action.log - tool = action.tool - tool_input = action.tool_input - output.append(f"Log: {log}") - if "Action" not in log and "Action Input" not in log: - output.append(f"Tool: {tool}") - output.append(f"Tool Input: {tool_input}") - output.append(f"Answer: {answer}") - output.append("") # Add a blank line - return "\n".join(output) - - -def get_result_and_thought(langchain_object: Any, inputs: dict): - """Get result and thought from extracted json""" - try: - if hasattr(langchain_object, "verbose"): - langchain_object.verbose = True - - if hasattr(langchain_object, 
"return_intermediate_steps"): - langchain_object.return_intermediate_steps = False - - try: - if not isinstance(langchain_object, AgentExecutor): - fix_memory_inputs(langchain_object) - except Exception as exc: - logger.error(f"Error fixing memory inputs: {exc}") - - try: - output = langchain_object(inputs, return_only_outputs=True) - except ValueError as exc: - # make the error message more informative - logger.debug(f"Error: {str(exc)}") - output = langchain_object.run(inputs) - - except Exception as exc: - raise ValueError(f"Error: {str(exc)}") from exc - return output - - -def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]: - """Get input string if only one input is provided""" - return list(inputs.values())[0] if len(inputs) == 1 else None - - -def get_build_result(data_graph, session_id): - # If session_id is provided, load the langchain_object from the session - # using build_sorted_vertices_with_caching.get_result_by_session_id - # if it returns something different than None, return it - # otherwise, build the graph and return the result - if session_id: - logger.debug(f"Loading LangChain object from session {session_id}") - result = build_sorted_vertices(data_graph=data_graph) - if result is not None: - logger.debug("Loaded LangChain object") - return result - - logger.debug("Building langchain object") - return build_sorted_vertices(data_graph) - - -def process_inputs( - inputs: Optional[Union[dict, List[dict]]] = None, artifacts: Optional[Dict[str, Any]] = None -) -> Union[dict, List[dict]]: - if inputs is None: - inputs = {} - if artifacts is None: - artifacts = {} - - if isinstance(inputs, dict): - inputs = update_inputs_dict(inputs, artifacts) - elif isinstance(inputs, List): - inputs = [update_inputs_dict(inp, artifacts) for inp in inputs] - - return inputs - - -def update_inputs_dict(inputs: dict, artifacts: Dict[str, Any]) -> dict: - for key, value in artifacts.items(): - if key == "repr": - continue - elif key not in inputs or not 
inputs[key]: - inputs[key] = value - - return inputs - - -async def process_runnable(runnable: Runnable, inputs: Union[dict, List[dict]]): - if isinstance(inputs, List) and hasattr(runnable, "abatch"): - result = await runnable.abatch(inputs) - elif isinstance(inputs, dict) and hasattr(runnable, "ainvoke"): - result = await runnable.ainvoke(inputs) - else: - raise ValueError(f"Runnable {runnable} does not support inputs of type {type(inputs)}") - # Check if the result is a list of AIMessages - if isinstance(result, list) and all(isinstance(r, AIMessage) for r in result): - result = [r.content for r in result] - elif isinstance(result, AIMessage): - result = result.content - return result - - -async def process_inputs_dict(built_object: Union[Chain, VectorStore, Runnable], inputs: dict): - if isinstance(built_object, Chain): - if inputs is None: - raise ValueError("Inputs must be provided for a Chain") - logger.debug("Generating result and thought") - result = get_result_and_thought(built_object, inputs) - - logger.debug("Generated result and thought") - elif isinstance(built_object, VectorStore) and "query" in inputs: - if isinstance(inputs, dict) and "search_type" not in inputs: - inputs["search_type"] = "similarity" - logger.info("search_type not provided, using default value: similarity") - result = built_object.search(**inputs) - elif isinstance(built_object, Document): - result = built_object.dict() - elif isinstance(built_object, Runnable): - result = await process_runnable(built_object, inputs) - if isinstance(result, list): - result = [r.content if hasattr(r, "content") else r for r in result] - elif hasattr(result, "content"): - result = result.content - else: - result = result - else: - result = None - - return result - - -async def process_inputs_list(built_object: Runnable, inputs: List[dict]): - return await process_runnable(built_object, inputs) - - -async def generate_result(built_object: Union[Chain, VectorStore, Runnable], inputs: Union[dict, 
List[dict]]): - if isinstance(inputs, dict): - result = await process_inputs_dict(built_object, inputs) - elif isinstance(inputs, List) and isinstance(built_object, Runnable): - result = await process_inputs_list(built_object, inputs) - else: - raise ValueError(f"Invalid inputs type: {type(inputs)}") - - if result is None: - logger.warning(f"Unknown built_object type: {type(built_object)}") - if isinstance(built_object, Coroutine): - result = asyncio.run(built_object) - result = built_object - - return result - - -class Result(BaseModel): - result: Any - session_id: str - - -async def process_graph_cached( - data_graph: Dict[str, Any], - inputs: Optional[Union[dict, List[dict]]] = None, - clear_cache=False, - session_id=None, -) -> Result: - session_service = get_session_service() - if clear_cache: - session_service.clear_session(session_id) - if session_id is None: - session_id = session_service.generate_key(session_id=session_id, data_graph=data_graph) - # Load the graph using SessionService - session = await session_service.load_session(session_id, data_graph) - graph, artifacts = session if session else (None, None) - if not graph: - raise ValueError("Graph not found in the session") - - result = await build_graph_and_generate_result( - graph=graph, session_id=session_id, inputs=inputs, artifacts=artifacts, session_service=session_service - ) - - return result - - -async def build_graph_and_generate_result( - graph: "Graph", - session_id: str, - inputs: Optional[Union[dict, List[dict]]] = None, - artifacts: Optional[Dict[str, Any]] = None, - session_service: Optional[SessionService] = None, -): - """Build the graph and generate the result""" - built_object = await graph.build() - processed_inputs = process_inputs(inputs, artifacts or {}) - result = await generate_result(built_object, processed_inputs) - # langchain_object is now updated with the new memory - # we need to update the cache with the updated langchain_object - if session_id and session_service: - 
session_service.update_session(session_id, (graph, artifacts)) - return Result(result=result, session_id=session_id) - - -def validate_input(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> List[Dict[str, Any]]: - if not isinstance(graph_data, dict) or not isinstance(tweaks, dict): - raise ValueError("graph_data and tweaks should be dictionaries") - - nodes = graph_data.get("data", {}).get("nodes") or graph_data.get("nodes") - - if not isinstance(nodes, list): - raise ValueError("graph_data should contain a list of nodes under 'data' key or directly under 'nodes' key") - - return nodes - - -def apply_tweaks(node: Dict[str, Any], node_tweaks: Dict[str, Any]) -> None: - template_data = node.get("data", {}).get("node", {}).get("template") - - if not isinstance(template_data, dict): - logger.warning(f"Template data for node {node.get('id')} should be a dictionary") - return - - for tweak_name, tweak_value in node_tweaks.items(): - if tweak_name and tweak_value and tweak_name in template_data: - key = tweak_name if tweak_name == "file_path" else "value" - template_data[tweak_name][key] = tweak_value - - -def process_tweaks(graph_data: Dict[str, Any], tweaks: Dict[str, Dict[str, Any]]) -> Dict[str, Any]: - """ - This function is used to tweak the graph data using the node id and the tweaks dict. - - :param graph_data: The dictionary containing the graph data. It must contain a 'data' key with - 'nodes' as its child or directly contain 'nodes' key. Each node should have an 'id' and 'data'. - :param tweaks: A dictionary where the key is the node id and the value is a dictionary of the tweaks. - The inner dictionary contains the name of a certain parameter as the key and the value to be tweaked. - - :return: The modified graph_data dictionary. - - :raises ValueError: If the input is not in the expected format. 
- """ - nodes = validate_input(graph_data, tweaks) - - for node in nodes: - if isinstance(node, dict) and isinstance(node.get("id"), str): - node_id = node["id"] - if node_tweaks := tweaks.get(node_id): - apply_tweaks(node, node_tweaks) - else: - logger.warning("Each node should be a dictionary with an 'id' key of type str") - - return graph_data - return graph_data - return graph_data diff --git a/src/backend/langflow/services/base.py b/src/backend/langflow/services/base.py deleted file mode 100644 index 301771944..000000000 --- a/src/backend/langflow/services/base.py +++ /dev/null @@ -1,12 +0,0 @@ -from abc import ABC - - -class Service(ABC): - name: str - ready: bool = False - - def teardown(self): - pass - - def set_ready(self): - self.ready = True diff --git a/src/backend/langflow/services/cache/__init__.py b/src/backend/langflow/services/cache/__init__.py deleted file mode 100644 index bf3a7c5ee..000000000 --- a/src/backend/langflow/services/cache/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from . import factory, service -from langflow.services.cache.service import InMemoryCache - - -__all__ = [ - "factory", - "service", - "InMemoryCache", -] diff --git a/src/backend/langflow/services/cache/base.py b/src/backend/langflow/services/cache/base.py deleted file mode 100644 index 3b34e12f6..000000000 --- a/src/backend/langflow/services/cache/base.py +++ /dev/null @@ -1,98 +0,0 @@ -import abc - -from langflow.services.base import Service - - -class BaseCacheService(Service): - """ - Abstract base class for a cache. - """ - - name = "cache_service" - - @abc.abstractmethod - def get(self, key): - """ - Retrieve an item from the cache. - - Args: - key: The key of the item to retrieve. - - Returns: - The value associated with the key, or None if the key is not found. - """ - - @abc.abstractmethod - def set(self, key, value): - """ - Add an item to the cache. - - Args: - key: The key of the item. - value: The value to cache. 
- """ - - @abc.abstractmethod - def upsert(self, key, value): - """ - Add an item to the cache if it doesn't exist, or update it if it does. - - Args: - key: The key of the item. - value: The value to cache. - """ - - @abc.abstractmethod - def delete(self, key): - """ - Remove an item from the cache. - - Args: - key: The key of the item to remove. - """ - - @abc.abstractmethod - def clear(self): - """ - Clear all items from the cache. - """ - - @abc.abstractmethod - def __contains__(self, key): - """ - Check if the key is in the cache. - - Args: - key: The key of the item to check. - - Returns: - True if the key is in the cache, False otherwise. - """ - - @abc.abstractmethod - def __getitem__(self, key): - """ - Retrieve an item from the cache using the square bracket notation. - - Args: - key: The key of the item to retrieve. - """ - - @abc.abstractmethod - def __setitem__(self, key, value): - """ - Add an item to the cache using the square bracket notation. - - Args: - key: The key of the item. - value: The value to cache. - """ - - @abc.abstractmethod - def __delitem__(self, key): - """ - Remove an item from the cache using the square bracket notation. - - Args: - key: The key of the item to remove. 
- """ diff --git a/src/backend/langflow/services/chat/service.py b/src/backend/langflow/services/chat/service.py deleted file mode 100644 index 91fe98845..000000000 --- a/src/backend/langflow/services/chat/service.py +++ /dev/null @@ -1,260 +0,0 @@ -import asyncio -import uuid -from collections import defaultdict -from typing import Any, Dict, List - -import orjson -from fastapi import WebSocket, status -from loguru import logger -from starlette.websockets import WebSocketState - -from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse -from langflow.interface.utils import pil_to_base64 -from langflow.services import ServiceType, service_manager -from langflow.services.base import Service -from langflow.services.chat.cache import Subject -from langflow.services.chat.utils import process_graph - -from .cache import cache_service - - -class ChatHistory(Subject): - def __init__(self): - super().__init__() - self.history: Dict[str, List[ChatMessage]] = defaultdict(list) - - def add_message(self, client_id: str, message: ChatMessage): - """Add a message to the chat history.""" - - self.history[client_id].append(message) - - if not isinstance(message, FileResponse): - self.notify() - - def get_history(self, client_id: str, filter_messages=True) -> List[ChatMessage]: - """Get the chat history for a client.""" - if history := self.history.get(client_id, []): - if filter_messages: - return [msg for msg in history if msg.type not in ["start", "stream"]] - return history - else: - return [] - - def empty_history(self, client_id: str): - """Empty the chat history for a client.""" - self.history[client_id] = [] - - -class ChatService(Service): - name = "chat_service" - - def __init__(self): - self.active_connections: Dict[str, WebSocket] = {} - self.connection_ids: Dict[str, str] = {} - self.chat_history = ChatHistory() - self.chat_cache = cache_service - self.chat_cache.attach(self.update) - self.cache_service = 
service_manager.get(ServiceType.CACHE_SERVICE) - - def on_chat_history_update(self): - """Send the last chat message to the client.""" - client_id = self.chat_cache.current_client_id - if client_id in self.active_connections: - chat_response = self.chat_history.get_history(client_id, filter_messages=False)[-1] - if chat_response.is_bot: - # Process FileResponse - if isinstance(chat_response, FileResponse): - # If data_type is pandas, convert to csv - if chat_response.data_type == "pandas": - chat_response.data = chat_response.data.to_csv() - elif chat_response.data_type == "image": - # Base64 encode the image - chat_response.data = pil_to_base64(chat_response.data) - # get event loop - loop = asyncio.get_event_loop() - - coroutine = self.send_json(client_id, chat_response) - asyncio.run_coroutine_threadsafe(coroutine, loop) - - def update(self): - if self.chat_cache.current_client_id in self.active_connections: - self.last_cached_object_dict = self.chat_cache.get_last() - # Add a new ChatResponse with the data - chat_response = FileResponse( - message=None, - type="file", - data=self.last_cached_object_dict["obj"], - data_type=self.last_cached_object_dict["type"], - ) - - self.chat_history.add_message(self.chat_cache.current_client_id, chat_response) - - async def connect(self, client_id: str, websocket: WebSocket): - self.active_connections[client_id] = websocket - # This is to avoid having multiple clients with the same id - #! 
Temporary solution - self.connection_ids[client_id] = f"{client_id}-{uuid.uuid4()}" - - def disconnect(self, client_id: str): - self.active_connections.pop(client_id, None) - self.connection_ids.pop(client_id, None) - - async def send_message(self, client_id: str, message: str): - websocket = self.active_connections[client_id] - await websocket.send_text(message) - - async def send_json(self, client_id: str, message: ChatMessage): - websocket = self.active_connections[client_id] - await websocket.send_json(message.model_dump()) - - async def close_connection(self, client_id: str, code: int, reason: str): - if websocket := self.active_connections[client_id]: - try: - await websocket.close(code=code, reason=reason) - self.disconnect(client_id) - except RuntimeError as exc: - # This is to catch the following error: - # Unexpected ASGI message 'websocket.close', after sending 'websocket.close' - if "after sending" in str(exc): - logger.error(f"Error closing connection: {exc}") - - async def process_message(self, client_id: str, payload: Dict, build_result: Any): - # Process the graph data and chat message - chat_inputs = payload.pop("inputs", {}) - chatkey = payload.pop("chatKey", None) - chat_inputs = ChatMessage(message=chat_inputs, chatKey=chatkey) - self.chat_history.add_message(client_id, chat_inputs) - - # graph_data = payload - start_resp = ChatResponse(message=None, type="start", intermediate_steps="") - await self.send_json(client_id, start_resp) - - # is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1 - # Generate result and thought - try: - logger.debug("Generating result and thought") - - result, intermediate_steps, raw_output = await process_graph( - build_result=build_result, - chat_inputs=chat_inputs, - client_id=client_id, - session_id=self.connection_ids[client_id], - ) - self.set_cache(client_id, build_result) - except Exception as e: - # Log stack trace - logger.exception(e) - self.chat_history.empty_history(client_id) - 
raise e - # Send a response back to the frontend, if needed - intermediate_steps = intermediate_steps or "" - history = self.chat_history.get_history(client_id, filter_messages=False) - file_responses = [] - if history: - # Iterate backwards through the history - for msg in reversed(history): - if isinstance(msg, FileResponse): - if msg.data_type == "image": - # Base64 encode the image - if isinstance(msg.data, str): - continue - msg.data = pil_to_base64(msg.data) - file_responses.append(msg) - if msg.type == "start": - break - - response = ChatResponse( - message=result, - intermediate_steps=intermediate_steps.strip(), - type="end", - files=file_responses, - ) - await self.send_json(client_id, response) - self.chat_history.add_message(client_id, response) - - def set_cache(self, client_id: str, langchain_object: Any) -> bool: - """ - Set the cache for a client. - """ - # client_id is the flow id but that already exists in the cache - # so we need to change it to something else - - result_dict = { - "result": langchain_object, - "type": type(langchain_object), - } - self.cache_service.upsert(client_id, result_dict) - return client_id in self.cache_service - - async def handle_websocket(self, client_id: str, websocket: WebSocket): - await self.connect(client_id, websocket) - - try: - chat_history = self.chat_history.get_history(client_id) - # iterate and make BaseModel into dict - chat_history = [chat.model_dump() for chat in chat_history] - await websocket.send_json(chat_history) - - while True: - json_payload = await websocket.receive_json() - if isinstance(json_payload, str): - payload = orjson.loads(json_payload) - elif isinstance(json_payload, dict): - payload = json_payload - if "clear_history" in payload and payload["clear_history"]: - self.chat_history.history[client_id] = [] - continue - - with self.chat_cache.set_client_id(client_id): - if build_result := self.cache_service.get(client_id).get("result"): - await self.process_message(client_id, payload, 
build_result) - - else: - raise RuntimeError(f"Could not find a build result for client_id {client_id}") - except Exception as exc: - # Handle any exceptions that might occur - logger.exception(f"Error handling websocket: {exc}") - if websocket.client_state == WebSocketState.CONNECTED: - await self.close_connection( - client_id=client_id, - code=status.WS_1011_INTERNAL_ERROR, - reason=str(exc), - ) - elif websocket.client_state == WebSocketState.DISCONNECTED: - self.disconnect(client_id) - - finally: - try: - # first check if the connection is still open - if websocket.client_state == WebSocketState.CONNECTED: - await self.close_connection( - client_id=client_id, - code=status.WS_1000_NORMAL_CLOSURE, - reason="Client disconnected", - ) - except Exception as exc: - logger.error(f"Error closing connection: {exc}") - self.disconnect(client_id) - - -def dict_to_markdown_table(my_dict): - markdown_table = "| Key | Value |\n|---|---|\n" - for key, value in my_dict.items(): - markdown_table += f"| {key} | {value} |\n" - return markdown_table - - -def list_of_dicts_to_markdown_table(dict_list): - if not dict_list: - return "No data provided." 
- - # Extract headers from the keys of the first dictionary - headers = dict_list[0].keys() - markdown_table = "| " + " | ".join(headers) + " |\n" - markdown_table += "| " + " | ".join("---" for _ in headers) + " |\n" - - for row_dict in dict_list: - row = [str(row_dict.get(header, "")) for header in headers] - markdown_table += "| " + " | ".join(row) + " |\n" - - return markdown_table diff --git a/src/backend/langflow/services/credentials/service.py b/src/backend/langflow/services/credentials/service.py deleted file mode 100644 index 08e04fdaa..000000000 --- a/src/backend/langflow/services/credentials/service.py +++ /dev/null @@ -1,37 +0,0 @@ -from typing import TYPE_CHECKING, Optional, Union -from uuid import UUID - -from fastapi import Depends -from langflow.services.auth import utils as auth_utils -from langflow.services.base import Service -from langflow.services.database.models.credential.model import Credential -from langflow.services.deps import get_session -from sqlmodel import Session, select - -if TYPE_CHECKING: - from langflow.services.settings.service import SettingsService - - -class CredentialService(Service): - name = "credential_service" - - def __init__(self, settings_service: "SettingsService"): - self.settings_service = settings_service - - def get_credential(self, user_id: Union[UUID, str], name: str, session: Session = Depends(get_session)) -> str: - # we get the credential from the database - # credential = session.query(Credential).filter(Credential.user_id == user_id, Credential.name == name).first() - credential = session.exec( - select(Credential).where(Credential.user_id == user_id, Credential.name == name) - ).first() - # we decrypt the value - if not credential or not credential.value: - raise ValueError(f"{name} credential not found.") - decrypted = auth_utils.decrypt_api_key(credential.value, settings_service=self.settings_service) - return decrypted - - def list_credentials( - self, user_id: Union[UUID, str], session: Session = 
Depends(get_session) - ) -> list[Optional[str]]: - credentials = session.exec(select(Credential).where(Credential.user_id == user_id)).all() - return [credential.name for credential in credentials] diff --git a/src/backend/langflow/services/database/models/__init__.py b/src/backend/langflow/services/database/models/__init__.py deleted file mode 100644 index 9dd3e0285..000000000 --- a/src/backend/langflow/services/database/models/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .api_key import ApiKey -from .credential import Credential -from .flow import Flow -from .user import User - -__all__ = ["Flow", "User", "ApiKey", "Credential"] diff --git a/src/backend/langflow/services/database/models/component/__init__.py b/src/backend/langflow/services/database/models/component/__init__.py deleted file mode 100644 index c5ad7d47f..000000000 --- a/src/backend/langflow/services/database/models/component/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .model import Component, ComponentModel - -__all__ = ["Component", "ComponentModel"] diff --git a/src/backend/langflow/services/database/models/component/model.py b/src/backend/langflow/services/database/models/component/model.py deleted file mode 100644 index 0a5afc440..000000000 --- a/src/backend/langflow/services/database/models/component/model.py +++ /dev/null @@ -1,29 +0,0 @@ -import uuid -from datetime import datetime -from typing import Optional - -from sqlmodel import Field, SQLModel - - -class Component(SQLModel, table=True): - id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) - frontend_node_id: uuid.UUID = Field(index=True) - name: str = Field(index=True) - description: Optional[str] = Field(default=None) - python_code: Optional[str] = Field(default=None) - return_type: Optional[str] = Field(default=None) - is_disabled: bool = Field(default=False) - is_read_only: bool = Field(default=False) - create_at: datetime = Field(default_factory=datetime.utcnow) - update_at: datetime = 
Field(default_factory=datetime.utcnow) - - -class ComponentModel(SQLModel): - id: uuid.UUID = Field(default_factory=uuid.uuid4) - frontend_node_id: uuid.UUID = Field(default=uuid.uuid4()) - name: str = Field(default="") - description: Optional[str] = None - python_code: Optional[str] = None - return_type: Optional[str] = None - is_disabled: bool = False - is_read_only: bool = False diff --git a/src/backend/langflow/services/database/models/credential/__init__.py b/src/backend/langflow/services/database/models/credential/__init__.py deleted file mode 100644 index 2f8b6cd01..000000000 --- a/src/backend/langflow/services/database/models/credential/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .model import Credential, CredentialCreate, CredentialRead, CredentialUpdate - -__all__ = ["Credential", "CredentialCreate", "CredentialRead", "CredentialUpdate"] diff --git a/src/backend/langflow/services/database/models/credential/model.py b/src/backend/langflow/services/database/models/credential/model.py deleted file mode 100644 index 95bd4b829..000000000 --- a/src/backend/langflow/services/database/models/credential/model.py +++ /dev/null @@ -1,43 +0,0 @@ -from datetime import datetime -from typing import TYPE_CHECKING, Optional -from uuid import UUID, uuid4 - -from sqlmodel import Field, Relationship, SQLModel - -from langflow.services.database.models.credential.schema import CredentialType - -if TYPE_CHECKING: - from langflow.services.database.models.user import User - - -class CredentialBase(SQLModel): - name: Optional[str] = Field(None, description="Name of the credential") - value: Optional[str] = Field(None, description="Encrypted value of the credential") - provider: Optional[str] = Field(None, description="Provider of the credential (e.g OpenAI)") - - -class Credential(CredentialBase, table=True): - id: Optional[UUID] = Field(default_factory=uuid4, primary_key=True, description="Unique ID for the credential") - # name is unique per user - created_at: datetime = 
Field(default_factory=datetime.utcnow, description="Creation time of the credential") - updated_at: Optional[datetime] = Field(None, description="Last update time of the credential") - # foreign key to user table - user_id: UUID = Field(description="User ID associated with this credential", foreign_key="user.id") - user: "User" = Relationship(back_populates="credentials") - - -class CredentialCreate(CredentialBase): - # AcceptedProviders is a custom Enum - provider: CredentialType = Field(description="Provider of the credential (e.g OpenAI)") - - -class CredentialRead(SQLModel): - id: UUID - name: Optional[str] = Field(None, description="Name of the credential") - provider: Optional[str] = Field(None, description="Provider of the credential (e.g OpenAI)") - - -class CredentialUpdate(SQLModel): - id: UUID # Include the ID for updating - name: Optional[str] = Field(None, description="Name of the credential") - value: Optional[str] = Field(None, description="Encrypted value of the credential") diff --git a/src/backend/langflow/services/database/models/credential/schema.py b/src/backend/langflow/services/database/models/credential/schema.py deleted file mode 100644 index 985915340..000000000 --- a/src/backend/langflow/services/database/models/credential/schema.py +++ /dev/null @@ -1,8 +0,0 @@ -from enum import Enum - - -class CredentialType(str, Enum): - """CredentialType is an Enum of the accepted providers""" - - OPENAI_API_KEY = "OPENAI_API_KEY" - ANTHROPIC_API_KEY = "ANTHROPIC_API_KEY" diff --git a/src/backend/langflow/services/database/models/flow/model.py b/src/backend/langflow/services/database/models/flow/model.py deleted file mode 100644 index d942fa93c..000000000 --- a/src/backend/langflow/services/database/models/flow/model.py +++ /dev/null @@ -1,73 +0,0 @@ -# Path: src/backend/langflow/database/models/flow.py - -from datetime import datetime -from typing import TYPE_CHECKING, Dict, Optional -from uuid import UUID, uuid4 - -from pydantic import 
field_serializer, field_validator -from sqlmodel import JSON, Column, Field, Relationship, SQLModel - -if TYPE_CHECKING: - from langflow.services.database.models.user import User - - -class FlowBase(SQLModel): - name: str = Field(index=True) - description: Optional[str] = Field(index=True, nullable=True, default=None) - data: Optional[Dict] = Field(default=None, nullable=True) - is_component: Optional[bool] = Field(default=False, nullable=True) - updated_at: Optional[datetime] = Field(default_factory=datetime.utcnow, nullable=True) - folder: Optional[str] = Field(default=None, nullable=True) - - @field_validator("data") - def validate_json(v): - if not v: - return v - if not isinstance(v, dict): - raise ValueError("Flow must be a valid JSON") - - # data must contain nodes and edges - if "nodes" not in v.keys(): - raise ValueError("Flow must have nodes") - if "edges" not in v.keys(): - raise ValueError("Flow must have edges") - - return v - - # updated_at can be serialized to JSON - @field_serializer("updated_at") - def serialize_dt(self, dt: datetime, _info): - if dt is None: - return None - return dt.isoformat() - - @field_validator("updated_at", mode="before") - def validate_dt(cls, v): - if v is None: - return v - elif isinstance(v, datetime): - return v - - return datetime.fromisoformat(v) - - -class Flow(FlowBase, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) - user_id: UUID = Field(index=True, foreign_key="user.id", nullable=True) - user: "User" = Relationship(back_populates="flows") - - -class FlowCreate(FlowBase): - user_id: Optional[UUID] = None - - -class FlowRead(FlowBase): - id: UUID - user_id: UUID = Field() - - -class FlowUpdate(SQLModel): - name: Optional[str] = None - description: Optional[str] = None - data: Optional[Dict] = None diff --git a/src/backend/langflow/services/deps.py b/src/backend/langflow/services/deps.py deleted file mode 
100644 index 1ac06738b..000000000 --- a/src/backend/langflow/services/deps.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import TYPE_CHECKING, Generator - -from langflow.services import ServiceType, service_manager - -if TYPE_CHECKING: - from langflow.services.cache.service import BaseCacheService - from langflow.services.chat.service import ChatService - from langflow.services.credentials.service import CredentialService - from langflow.services.database.service import DatabaseService - from langflow.services.plugins.service import PluginService - from langflow.services.session.service import SessionService - from langflow.services.settings.service import SettingsService - from langflow.services.store.service import StoreService - from langflow.services.task.service import TaskService - from sqlmodel import Session - - -def get_credential_service() -> "CredentialService": - return service_manager.get(ServiceType.CREDENTIAL_SERVICE) # type: ignore - - -def get_plugins_service() -> "PluginService": - return service_manager.get(ServiceType.PLUGIN_SERVICE) # type: ignore - - -def get_settings_service() -> "SettingsService": - try: - return service_manager.get(ServiceType.SETTINGS_SERVICE) # type: ignore - except ValueError: - # initialize settings service - from langflow.services.manager import initialize_settings_service - - initialize_settings_service() - return service_manager.get(ServiceType.SETTINGS_SERVICE) # type: ignore - - -def get_db_service() -> "DatabaseService": - return service_manager.get(ServiceType.DATABASE_SERVICE) # type: ignore - - -def get_session() -> Generator["Session", None, None]: - db_service = get_db_service() - yield from db_service.get_session() - - -def get_cache_service() -> "BaseCacheService": - return service_manager.get(ServiceType.CACHE_SERVICE) # type: ignore - - -def get_session_service() -> "SessionService": - return service_manager.get(ServiceType.SESSION_SERVICE) # type: ignore - - -def get_task_service() -> "TaskService": - 
return service_manager.get(ServiceType.TASK_SERVICE) # type: ignore - - -def get_chat_service() -> "ChatService": - return service_manager.get(ServiceType.CHAT_SERVICE) # type: ignore - - -def get_store_service() -> "StoreService": - return service_manager.get(ServiceType.STORE_SERVICE) # type: ignore diff --git a/src/backend/langflow/services/factory.py b/src/backend/langflow/services/factory.py deleted file mode 100644 index 874d7374c..000000000 --- a/src/backend/langflow/services/factory.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from langflow.services.base import Service - - -class ServiceFactory: - def __init__(self, service_class): - self.service_class = service_class - - def create(self, *args, **kwargs) -> "Service": - raise NotImplementedError diff --git a/src/backend/langflow/utils/types.py b/src/backend/langflow/utils/types.py deleted file mode 100644 index 3657d550e..000000000 --- a/src/backend/langflow/utils/types.py +++ /dev/null @@ -1,2 +0,0 @@ -class Prompt: - pass diff --git a/src/backend/langflow/version/__init__.py b/src/backend/langflow/version/__init__.py new file mode 100644 index 000000000..5c0cf285c --- /dev/null +++ b/src/backend/langflow/version/__init__.py @@ -0,0 +1 @@ +from .version import __version__ # noqa: F401 diff --git a/src/backend/langflow/version/version.py b/src/backend/langflow/version/version.py new file mode 100644 index 000000000..540b465f9 --- /dev/null +++ b/src/backend/langflow/version/version.py @@ -0,0 +1,7 @@ +from importlib import metadata + +try: + __version__ = metadata.version("langflow") +except metadata.PackageNotFoundError: + __version__ = "" +del metadata diff --git a/src/backend/langflow/worker.py b/src/backend/langflow/worker.py deleted file mode 100644 index b3872bcd8..000000000 --- a/src/backend/langflow/worker.py +++ /dev/null @@ -1,75 +0,0 @@ -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union - -from asgiref.sync import async_to_sync -from 
celery.exceptions import SoftTimeLimitExceeded # type: ignore -from langflow.core.celery_app import celery_app -from langflow.processing.process import Result, generate_result, process_inputs -from langflow.services.deps import get_session_service -from langflow.services.manager import initialize_session_service -from loguru import logger -from rich import print - -if TYPE_CHECKING: - from langflow.graph.vertex.base import Vertex - - -@celery_app.task(acks_late=True) -def test_celery(word: str) -> str: - return f"test task return {word}" - - -@celery_app.task(bind=True, soft_time_limit=30, max_retries=3) -def build_vertex(self, vertex: "Vertex") -> "Vertex": - """ - Build a vertex - """ - try: - vertex.task_id = self.request.id - async_to_sync(vertex.build)() - return vertex - except SoftTimeLimitExceeded as e: - raise self.retry(exc=SoftTimeLimitExceeded("Task took too long"), countdown=2) from e - - -@celery_app.task(acks_late=True) -def process_graph_cached_task( - data_graph: Dict[str, Any], - inputs: Optional[Union[dict, List[dict]]] = None, - clear_cache=False, - session_id=None, -) -> Dict[str, Any]: - try: - initialize_session_service() - session_service = get_session_service() - - if clear_cache: - session_service.clear_session(session_id) - - if session_id is None: - session_id = session_service.generate_key(session_id=session_id, data_graph=data_graph) - - # Use async_to_sync to handle the asynchronous part of the session service - session_data = async_to_sync(session_service.load_session, force_new_loop=True)(session_id, data_graph) - logger.warning(f"session_data: {session_data}") - graph, artifacts = session_data if session_data else (None, None) - - if not graph: - raise ValueError("Graph not found in the session") - - # Use async_to_sync for the asynchronous build method - built_object = async_to_sync(graph.build, force_new_loop=True)() - - logger.debug(f"Built object: {built_object}") - - processed_inputs = process_inputs(inputs, artifacts or {}) - 
result = async_to_sync(generate_result, force_new_loop=True)(built_object, processed_inputs) - - # Update the session with the new data - session_service.update_session(session_id, (graph, artifacts)) - result_object = Result(result=result, session_id=session_id).model_dump() - print(f"Result object: {result_object}") - return result_object - except Exception as e: - logger.error(f"Error in process_graph_cached_task: {e}") - # Handle the exception as needed, maybe re-raise or return an error message - raise diff --git a/src/frontend/.eslintrc.json b/src/frontend/.eslintrc.json new file mode 100644 index 000000000..682c7fc87 --- /dev/null +++ b/src/frontend/.eslintrc.json @@ -0,0 +1,29 @@ +{ + "extends": ["eslint:recommended", "plugin:node/recommended"], + "parserOptions": { + "ecmaVersion": 2018 + }, + "rules": { + "no-console": "warn", + "no-self-assign": "warn", + "no-self-compare": "warn", + "complexity": ["error", { "max": 15 }], + "indent": ["error", 2, { "SwitchCase": 1 }], + "no-dupe-keys": "error", + "no-invalid-regexp": "error", + "no-undef": "error", + "no-return-assign": "error", + "no-redeclare": "error", + "no-empty": "error", + "no-await-in-loop": "error", + "node/exports-style": ["error", "module.exports"], + "node/file-extension-in-import": ["error", "always"], + "node/prefer-global/buffer": ["error", "always"], + "node/prefer-global/console": ["error", "always"], + "node/prefer-global/process": ["error", "always"], + "node/prefer-global/url-search-params": ["error", "always"], + "node/prefer-global/url": ["error", "always"], + "node/prefer-promises/dns": "error", + "node/prefer-promises/fs": "error" + } +} diff --git a/src/frontend/.github/workflows/playwright.yml b/src/frontend/.github/workflows/playwright.yml deleted file mode 100644 index 90b6b700d..000000000 --- a/src/frontend/.github/workflows/playwright.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Playwright Tests -on: - push: - branches: [ main, master ] - pull_request: - branches: [ main, 
master ] -jobs: - test: - timeout-minutes: 60 - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 - with: - node-version: 18 - - name: Install dependencies - run: npm ci - - name: Install Playwright Browsers - run: npx playwright install --with-deps - - name: Run Playwright tests - run: npx playwright test - - uses: actions/upload-artifact@v3 - if: always() - with: - name: playwright-report - path: playwright-report/ - retention-days: 30 diff --git a/src/frontend/.gitignore b/src/frontend/.gitignore index 9dfae2bb0..a6dee659a 100644 --- a/src/frontend/.gitignore +++ b/src/frontend/.gitignore @@ -24,3 +24,7 @@ yarn-error.log* /test-results/ /playwright-report/*/ /playwright/.cache/ +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ diff --git a/src/frontend/harFiles/backend_12112023.har b/src/frontend/harFiles/backend_12112023.har deleted file mode 100644 index 63dbde94a..000000000 --- a/src/frontend/harFiles/backend_12112023.har +++ /dev/null @@ -1,599 +0,0 @@ -{ - "log": { - "version": "1.2", - "creator": { - "name": "Playwright", - "version": "1.39.0" - }, - "browser": { - "name": "chromium", - "version": "119.0.6045.9" - }, - "entries": [ - { - "startedDateTime": "2023-12-11T18:54:58.349Z", - "time": 1.142, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/store/check/", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": 
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "16" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:57 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"enabled\":true}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 1.142 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.349Z", - "time": 0.484, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/store/check/api_key", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", 
\"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 403, - "statusText": "Forbidden", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "73" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:57 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"detail\":\"An API key as query or header, or a JWT token must be passed\"}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.484 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.349Z", - "time": 0.476, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/version", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": 
"\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "22" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:57 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"version\":\"0.6.0rc1\"}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.476 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.349Z", - "time": 0.59, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/auto_login", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": 
[], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "227" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:57 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"access_token\":\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20\",\"refresh_token\":null,\"token_type\":\"bearer\"}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.59 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.380Z", - "time": 0.762, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/store/check/api_key", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 403, - "statusText": 
"Forbidden", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "73" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:58 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"detail\":\"An API key as query or header, or a JWT token must be passed\"}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.762 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.423Z", - "time": 1.03, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/flows/", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20; refresh_tkn_lflw=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/flows" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "375696" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:58 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "[{\"name\":\"Awesome Euclid\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":374,\"id\":\"PromptTemplate-m2yFu\",\"type\":\"genericNode\",\"position\":{\"x\":462.6456058272081,\"y\":1033.9314297130313},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"transcription\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"ad
vanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"create an image prompt based on the song's lyrics to be used as the album cover of this song:\\n\\nlyrics:\\n{transcription}\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"transcription\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"transcription\",\"display_name\":\"transcription\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"PromptTemplate\",\"StringPromptTemplate\",\"BasePromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"transcription\"],\"template\":[\"transcription\",\"question\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-m2yFu\"},\"selected\":false,\"positionAbsolute\":{\"x\":462.6456058272081,\"y\":1033.9314297130313},\"dragging\":false},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-urapv\",\"type\":\"genericNode\",\"position\":{\"x\":189.94856095084924,\"y\":-85.06556385338186},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":fa
lse,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false,\"value\":\"\"},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API 
Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"
placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-urapv\"},\"selected\":false,\"positionAbsolute\":{\"x\":189.94856095084924,\"y\":-85.06556385338186},\"dragging\":false},{\"width\":384,\"height\":806,\"id\":\"CustomComponent-IEIUl\",\"type\":\"genericNode\",\"position\":{\"x\":2364.865919667005,\"y\":88.43094097025096},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nfrom platformdirs import user_cache_dir\\nimport base64\\nfrom PIL import Image\\nfrom io import BytesIO\\nfrom openai import OpenAI\\nfrom pathlib import Path\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Image generator\\\"\\n description: str = \\\"generate images using Dall-E\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\",\\\"input_types\\\":[\\\"str\\\"]},\\n \\\"api_key\\\":{\\\"display_name\\\":\\\"OpenAI API key\\\",\\\"password\\\":True},\\n \\\"model\\\":{\\\"display_name\\\":\\\"Model 
name\\\",\\\"advanced\\\":True,\\\"options\\\":[\\\"dall-e-2\\\",\\\"dall-e-3\\\"], \\\"value\\\":\\\"dall-e-3\\\"},\\n \\\"file_name\\\":{\\\"display_name\\\":\\\"File Name\\\"},\\n \\\"output_format\\\":{\\\"display_name\\\":\\\"Output format\\\",\\\"options\\\":[\\\"jpeg\\\",\\\"png\\\"],\\\"value\\\":\\\"jpeg\\\"},\\n \\\"width\\\":{\\\"display_name\\\":\\\"Width\\\" ,\\\"value\\\":1024},\\n \\\"height\\\":{\\\"display_name\\\":\\\"Height\\\", \\\"value\\\":1024}\\n }\\n\\n def build(self, prompt:str,api_key:str,model:str,file_name:str,output_format:str,width:int,height:int):\\n client = OpenAI(api_key=api_key)\\n cache_dir = Path(user_cache_dir(\\\"langflow\\\"))\\n images_dir = cache_dir / \\\"images\\\"\\n images_dir.mkdir(parents=True, exist_ok=True)\\n image_path = images_dir / f\\\"{file_name}.{output_format}\\\"\\n response = client.images.generate(\\n model=model,\\n prompt=prompt,\\n size=f\\\"{height}x{width}\\\",\\n response_format=\\\"b64_json\\\",\\n n=1,\\n )\\n # Decode base64-encoded image string\\n binary_data = base64.b64decode(response.data[0].b64_json)\\n # Create PIL Image object from binary image data\\n image_pil = Image.open(BytesIO(binary_data))\\n image_pil.save(image_path, format=output_format.upper())\\n return \\\"\\\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_key\",\"display_name\":\"OpenAI API key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"file_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"file_name\",\"display_name\":\"File 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"album\"},\"height\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1024,\"password\":false,\"name\":\"height\",\"display_name\":\"Height\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"dall-e-3\",\"password\":false,\"options\":[\"dall-e-2\",\"dall-e-3\"],\"name\":\"model\",\"display_name\":\"Model name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"output_format\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"jpeg\",\"password\":false,\"options\":[\"jpeg\",\"png\"],\"name\":\"output_format\",\"display_name\":\"Output format\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"input_types\":[\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"width\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1024,\"password\":false,\"name\":\"width\",\"display_name\":\"Width\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false}},\"description\":\"generate images using Dall-E\",\"base_classes\":[\"Data\"],\"display_name\":\"Image 
generator\",\"custom_fields\":{\"api_key\":null,\"file_name\":null,\"height\":null,\"model\":null,\"output_format\":null,\"prompt\":null,\"width\":null},\"output_types\":[\"Data\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-IEIUl\"},\"selected\":false,\"positionAbsolute\":{\"x\":2364.865919667005,\"y\":88.43094097025096}},{\"width\":384,\"height\":338,\"id\":\"LLMChain-KlJb3\",\"type\":\"genericNode\",\"position\":{\"x\":1242.9851164540805,\"y\":-139.74634696823108},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-KlJb3\"},\"selected\":false,\"positionAbsolute\":{\"x\":1242.9851164540805,\"y\":-139.74634696823108},\"dragging\":false},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-bCuc0\",\"type\":\"genericNode\",\"position\":{\"x\":1747.8107777342625,\"y\":319.13729017446667},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Custom Component\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return 
{\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Chain) -> str:\\n result = param.run({})\\n self.status = result\\n return result\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"param\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"param\",\"display_name\":\"Parameter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Chain\",\"list\":false}},\"description\":\"Create any custom component you want!\",\"base_classes\":[\"str\"],\"display_name\":\"Custom Component\",\"custom_fields\":{\"param\":null},\"output_types\":[\"str\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-bCuc0\"},\"selected\":false,\"positionAbsolute\":{\"x\":1747.8107777342625,\"y\":319.13729017446667}}],\"edges\":[{\"source\":\"LLMChain-KlJb3\",\"target\":\"CustomComponent-bCuc0\",\"sourceHandle\":\"{œbaseClassesœ:[œChainœ,œCallableœ],œdataTypeœ:œLLMChainœ,œidœ:œLLMChain-KlJb3œ}\",\"targetHandle\":\"{œfieldNameœ:œparamœ,œidœ:œCustomComponent-bCuc0œ,œinputTypesœ:null,œtypeœ:œChainœ}\",\"id\":\"reactflow__edge-LLMChain-KlJb3{œbaseClassesœ:[œChainœ,œCallableœ],œdataTypeœ:œLLMChainœ,œidœ:œLLMChain-KlJb3œ}-CustomComponent-bCuc0{œfieldNameœ:œparamœ,œidœ:œCustomComponent-bCuc0œ,œinputTypesœ:null,œtypeœ:œChainœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"param\",\"id\":\"CustomComponent-bCuc0\",\"inputTypes\":null,\"type\":\"Chain\"},\"sourceHandle\":{\"baseClasses\":[\"Chain\",\"Callable\"],\"dataType\":\"LLMChain\",\"id\":\"LLMChain-KlJb3\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"ChatOpenAI-urapv\",\"target\":\"LLMChain-KlJb3\",\"sourceHandle\":\"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-urapvœ}\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"id\":\"reactflow__edge-ChatOpenAI-urapv{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-urapvœ}-LLMChain-KlJb3{œfieldNameœ:œllmœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-KlJb3\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-urapv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"PromptTemplate-m2yFu\",\"target\":\"LLMChain-KlJb3\",\"sourceHandle\":\"{œbaseClassesœ:[œPromptTemplateœ,œStringPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-m2yFuœ}\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"id\":\"reactflow__edge-PromptTemplate-m2yFu{œbaseClassesœ:[œPromptTemplateœ,œStringPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-m2yFuœ}-LLMChain-KlJb3{œfieldNameœ:œpromptœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-KlJb3\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"PromptTemplate\",\"StringPromptTemplate\",\"BasePromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-m2yFu\"}},\"style\":{\"stroke\
":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"CustomComponent-bCuc0\",\"target\":\"CustomComponent-IEIUl\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-bCuc0œ}\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œCustomComponent-IEIUlœ,œinputTypesœ:[œstrœ],œtypeœ:œstrœ}\",\"id\":\"reactflow__edge-CustomComponent-bCuc0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-bCuc0œ}-CustomComponent-IEIUl{œfieldNameœ:œpromptœ,œidœ:œCustomComponent-IEIUlœ,œinputTypesœ:[œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"CustomComponent-IEIUl\",\"inputTypes\":[\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-bCuc0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false}],\"viewport\":{\"x\":92.23454077990459,\"y\":183.8125619056221,\"zoom\":0.6070974421975234}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:33:18.503954\",\"folder\":null,\"id\":\"fe142ce5-32dc-4955-b186-672ced662f13\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Darwin\",\"description\":\"Conversation Catalyst 
Engine.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":626,\"id\":\"OpenAI-3ZVDh\",\"type\":\"genericNode\",\"position\":{\"x\":-4.0041891741949485,\"y\":-114.02615182719649},\"data\":{\"type\":\"OpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":20,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"best_of\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic
\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"logit_bias\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\"
:false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-davinci-003\",\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language 
models.\",\"base_classes\":[\"OpenAI\",\"BaseLLM\",\"BaseOpenAI\",\"BaseLanguageModel\"],\"display_name\":\"OpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAI-3ZVDh\"},\"selected\":true,\"positionAbsolute\":{\"x\":-4.0041891741949485,\"y\":-114.02615182719649},\"dragging\":false},{\"width\":384,\"height\":338,\"id\":\"LLMChain-RFYXY\",\"type\":\"genericNode\",\"position\":{\"x\":586.672100458868,\"y\":10.967049167706678},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-RFYXY\"},\"positionAbsolute\":{\"x\":586.672100458868,\"y\":10.967049167706678}},{\"width\":384,\"height\":242,\"id\":\"ChatPromptTemplate-ce1sg\",\"type\":\"genericNode\",\"position\":{\"x\":395.598984452791,\"y\":612.188491773035},\"data\":{\"type\":\"ChatPromptTemplate\",\"node\":{\"template\":{\"messages\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"messages\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseMessagePromptTemplate\",\"list\":true},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"sho
w\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatPromptTemplate\"},\"description\":\"A prompt template for chat models.\",\"base_classes\":[\"ChatPromptTemplate\",\"BaseChatPromptTemplate\",\"BasePromptTemplate\"],\"display_name\":\"ChatPromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null},\"id\":\"ChatPromptTemplate-ce1sg\"},\"selected\":false,\"positionAbsolute\":{\"x\":395.598984452791,\"y\":612.188491773035},\"dragging\":false}],\"edges\":[{\"source\":\"OpenAI-3ZVDh\",\"sourceHandle\":\"{œbaseClassesœ:[œOpenAIœ,œBaseLLMœ,œBaseOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œOpenAIœ,œidœ:œOpenAI-3ZVDhœ}\",\"target\":\"LLMChain-RFYXY\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-RFYXY\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"OpenAI\",\"BaseLLM\",\"BaseOpenAI\",\"BaseLanguageModel\"],\"dataType\":\"OpenAI\",\"id\":\"OpenAI-3ZVDh\"}},\"style\":{\"st
roke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-OpenAI-3ZVDh{œbaseClassesœ:[œOpenAIœ,œBaseLLMœ,œBaseOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œOpenAIœ,œidœ:œOpenAI-3ZVDhœ}-LLMChain-RFYXY{œfieldNameœ:œllmœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\"},{\"source\":\"ChatPromptTemplate-ce1sg\",\"sourceHandle\":\"{œbaseClassesœ:[œChatPromptTemplateœ,œBaseChatPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œChatPromptTemplateœ,œidœ:œChatPromptTemplate-ce1sgœ}\",\"target\":\"LLMChain-RFYXY\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-RFYXY\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"ChatPromptTemplate\",\"BaseChatPromptTemplate\",\"BasePromptTemplate\"],\"dataType\":\"ChatPromptTemplate\",\"id\":\"ChatPromptTemplate-ce1sg\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatPromptTemplate-ce1sg{œbaseClassesœ:[œChatPromptTemplateœ,œBaseChatPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œChatPromptTemplateœ,œidœ:œChatPromptTemplate-ce1sgœ}-LLMChain-RFYXY{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":8.832868402772647,\"y\":189.85443326477025,\"zoom\":0.6070974421975235}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:50:19.584160\",\"folder\":null,\"id\":\"103766f0-1f50-427a-9ba7-2ab73343c524\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Perky Easley\",\"description\":\"Your Toolkit for Text 
Generation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-VPh47\",\"type\":\"genericNode\",\"position\":{\"x\":-328.5742193020408,\"y\":-420.1025929438987},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":fals
e,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"mul
tiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"BaseChatModel\",\"ChatOpenAI\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-VPh47\"},\"selected\":false,\"positionAbsolute\":{\"x\":-328.5742193020408,\"y\":-420.1025929438987},\"dragging\":false},{\"width\":384,\"height\":338,\"id\":\"LLMChain-NgFyo\",\"type\":\"genericNode\",\"position\":{\"x\":225.3113389084088,\"y\":-193.3520019494289},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> 
Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-NgFyo\"},\"selected\":false,\"positionAbsolute\":{\"x\":225.3113389084088,\"y\":-193.3520019494289},\"dragging\":false},{\"width\":384,\"height\":374,\"id\":\"PromptTemplate-oAFjh\",\"type\":\"genericNode\",\"position\":{\"x\":-342.62522294052764,\"y\":391.20629510686103},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"typ
e\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"links\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Answer everything with links\\n{links}\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"links\",\"display_name\":\"links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"BasePromptTemplate\",\"StringPromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"links\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-oAFjh\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-342.62522294052764,\"y\":391.20629510686103}}],\"edges\":[{\"source\":\"ChatOpenAI-VPh47\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VPh47œ}\",\"target\":\"LLMChain-NgFyo\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-NgFyo\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseChatModel\",\"ChatOpenAI\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-VPh47\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-VPh47{œbaseClassesœ:[œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VPh47œ}-LLMChain-NgFyo{œfieldNameœ:œllmœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\"},{\"source\":\"PromptTemplate-oAFjh\",\"sourceHandle\":\"{œbaseClassesœ:[œBasePromptTemplateœ,œStringPromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-oAFjhœ}\",\"target\":\"LLMChain-NgFyo\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-NgFyo\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"BasePromptTemplate\",\"StringPromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-oAFjh\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-PromptTemplate-oAFjh{œbaseClassesœ:[œBasePromptTemplateœ,œStringPromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-oAFjhœ}-LLMChain-NgFyo{œfieldNameœ:œpromptœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":338.6546182690814,\"y\":53.026340800283265,\"zoom\":0.7169776240079143}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:53:25.148460\",\"folder\":null,\"id\":\"a794fc48-5e9b-42a3-924f-7fe610500035\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"(D) Basic Chat (1) (4)\",\"description\":\"Simplest possible chat 
model\",\"data\":{\"nodes\":[{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-fBcfh\",\"type\":\"genericNode\",\"position\":{\"x\":228.87326389541306,\"y\":465.8628482073749},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-
16k-0613\",\"gpt-3.5-turbo-16k\",\"gpt-4-0613\",\"gpt-4-32k-0613\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false,\"value\":60},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseChatModel\",\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\"},\"id\":\"ChatOpenAI-fBcfh\",\"value\":null},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":228.87326389541306,\"y\":465.8628482073749}},{\"width\":384,\"height\":310,\"id\":\"ConversationChain-bVNex\",\"type\":\"genericNode\",\"position\":{\"x\":806,\"y\":554},\"data\":{\"type\":\"ConversationChain\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"_type\":\"default\"},\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLMOutputParser\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"history\",\"input\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. 
If the AI does not know the answer to a question, it truthfully says it does not know.\\n\\nCurrent conversation:\\n{history}\\nHuman: {input}\\nAI:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"input\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"llm_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"response\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_final_only\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_final_only\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationChain\"},\"description\":\"Chain 
to have a conversation and load context from memory.\",\"base_classes\":[\"ConversationChain\",\"Chain\",\"LLMChain\",\"function\"],\"display_name\":\"ConversationChain\",\"documentation\":\"\"},\"id\":\"ConversationChain-bVNex\",\"value\":null},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":806,\"y\":554}}],\"edges\":[{\"source\":\"ChatOpenAI-fBcfh\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseChatModelœ,œBaseLanguageModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-fBcfhœ}\",\"target\":\"ConversationChain-bVNex\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œConversationChain-bVNexœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"className\":\"stroke-gray-900 stroke-connection\",\"id\":\"reactflow__edge-ChatOpenAI-fBcfh{œbaseClassesœ:[œBaseChatModelœ,œBaseLanguageModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-fBcfhœ}-ConversationChain-bVNex{œfieldNameœ:œllmœ,œidœ:œConversationChain-bVNexœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"BaseChatModel\",\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-fBcfh\"},\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"ConversationChain-bVNex\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"}},\"style\":{\"stroke\":\"#555\"},\"animated\":false}],\"viewport\":{\"x\":-118.21416568593895,\"y\":-240.64815770363373,\"zoom\":0.7642485855675408}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:57:55.879806\",\"folder\":null,\"id\":\"bddebeea-b80a-4b28-8895-c4425687dcce\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Directory Loader\",\"description\":\"Generic File Loader\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"import os\\n\\nfrom langchain.schema import Document\\nfrom langflow 
import CustomComponent\\nimport glob\\n\\nclass DirectoryLoaderComponent(CustomComponent):\\n display_name: str = \\\"Directory Loader\\\"\\n description: str = \\\"Generic File Loader\\\"\\n beta = True\\n loaders_info = [\\n {\\n \\\"loader\\\": \\\"AirbyteJSONLoader\\\",\\n \\\"name\\\": \\\"Airbyte JSON (.jsonl)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.AirbyteJSONLoader\\\",\\n \\\"defaultFor\\\": [\\\"jsonl\\\"],\\n \\\"allowdTypes\\\": [\\\"jsonl\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"JSONLoader\\\",\\n \\\"name\\\": \\\"JSON (.json)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.JSONLoader\\\",\\n \\\"defaultFor\\\": [\\\"json\\\"],\\n \\\"allowdTypes\\\": [\\\"json\\\"],\\n \\\"kwargs\\\": {\\\"jq_schema\\\": \\\".\\\", \\\"text_content\\\": False}\\n },\\n {\\n \\\"loader\\\": \\\"BSHTMLLoader\\\",\\n \\\"name\\\": \\\"BeautifulSoup4 HTML (.html, .htm)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.BSHTMLLoader\\\",\\n \\\"allowdTypes\\\": [\\\"html\\\", \\\"htm\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"CSVLoader\\\",\\n \\\"name\\\": \\\"CSV (.csv)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.CSVLoader\\\",\\n \\\"defaultFor\\\": [\\\"csv\\\"],\\n \\\"allowdTypes\\\": [\\\"csv\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"CoNLLULoader\\\",\\n \\\"name\\\": \\\"CoNLL-U (.conllu)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.CoNLLULoader\\\",\\n \\\"defaultFor\\\": [\\\"conllu\\\"],\\n \\\"allowdTypes\\\": [\\\"conllu\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"EverNoteLoader\\\",\\n \\\"name\\\": \\\"EverNote (.enex)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.EverNoteLoader\\\",\\n \\\"defaultFor\\\": [\\\"enex\\\"],\\n \\\"allowdTypes\\\": [\\\"enex\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"FacebookChatLoader\\\",\\n \\\"name\\\": \\\"Facebook Chat (.json)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.FacebookChatLoader\\\",\\n \\\"allowdTypes\\\": [\\\"json\\\"],\\n },\\n {\\n \\\"loader\\\": 
\\\"OutlookMessageLoader\\\",\\n \\\"name\\\": \\\"Outlook Message (.msg)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.OutlookMessageLoader\\\",\\n \\\"defaultFor\\\": [\\\"msg\\\"],\\n \\\"allowdTypes\\\": [\\\"msg\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"PyPDFLoader\\\",\\n \\\"name\\\": \\\"PyPDF (.pdf)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.PyPDFLoader\\\",\\n \\\"defaultFor\\\": [\\\"pdf\\\"],\\n \\\"allowdTypes\\\": [\\\"pdf\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"STRLoader\\\",\\n \\\"name\\\": \\\"Subtitle (.str)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.STRLoader\\\",\\n \\\"defaultFor\\\": [\\\"str\\\"],\\n \\\"allowdTypes\\\": [\\\"str\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"TextLoader\\\",\\n \\\"name\\\": \\\"Text (.txt)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.TextLoader\\\",\\n \\\"defaultFor\\\": [\\\"txt\\\"],\\n \\\"allowdTypes\\\": [\\\"txt\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredEmailLoader\\\",\\n \\\"name\\\": \\\"Unstructured Email (.eml)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredEmailLoader\\\",\\n \\\"defaultFor\\\": [\\\"eml\\\"],\\n \\\"allowdTypes\\\": [\\\"eml\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredHTMLLoader\\\",\\n \\\"name\\\": \\\"Unstructured HTML (.html, .htm)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredHTMLLoader\\\",\\n \\\"defaultFor\\\": [\\\"html\\\", \\\"htm\\\"],\\n \\\"allowdTypes\\\": [\\\"html\\\", \\\"htm\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredMarkdownLoader\\\",\\n \\\"name\\\": \\\"Unstructured Markdown (.md)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredMarkdownLoader\\\",\\n \\\"defaultFor\\\": [\\\"md\\\"],\\n \\\"allowdTypes\\\": [\\\"md\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredPowerPointLoader\\\",\\n \\\"name\\\": \\\"Unstructured PowerPoint (.pptx)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredPowerPointLoader\\\",\\n 
\\\"defaultFor\\\": [\\\"pptx\\\"],\\n \\\"allowdTypes\\\": [\\\"pptx\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredWordLoader\\\",\\n \\\"name\\\": \\\"Unstructured Word (.docx)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredWordLoader\\\",\\n \\\"defaultFor\\\": [\\\"docx\\\"],\\n \\\"allowdTypes\\\": [\\\"docx\\\"],\\n },\\n]\\n\\n\\n def build_config(self):\\n loader_options = [\\\"Automatic\\\"] + [\\n loader_info[\\\"name\\\"] for loader_info in self.loaders_info\\n ]\\n\\n file_types = []\\n suffixes = []\\n\\n for loader_info in self.loaders_info:\\n if \\\"allowedTypes\\\" in loader_info:\\n file_types.extend(loader_info[\\\"allowedTypes\\\"])\\n suffixes.extend([f\\\".{ext}\\\" for ext in loader_info[\\\"allowedTypes\\\"]])\\n\\n return {\\n \\\"directory_path\\\": {\\n \\\"display_name\\\": \\\"Directory Path\\\",\\n \\\"required\\\": True,\\n },\\n \\\"loader\\\": {\\n \\\"display_name\\\": \\\"Loader\\\",\\n \\\"is_list\\\": True,\\n \\\"required\\\": True,\\n \\\"options\\\": loader_options,\\n \\\"value\\\": \\\"Automatic\\\",\\n },\\n }\\n\\n def build(self, directory_path: str, loader: str) -> Document:\\n # Verifique se o diretório existe\\n if not os.path.exists(directory_path):\\n raise ValueError(f\\\"Directory not found: {directory_path}\\\")\\n\\n files = glob.glob(directory_path + \\\"/*.*\\\")\\n\\n\\n loader_info = None\\n if loader == \\\"Automatic\\\":\\n for file in files:\\n file_type = file.split(\\\".\\\")[-1]\\n\\n\\n for info in self.loaders_info:\\n if \\\"defaultFor\\\" in info:\\n if file_type in info[\\\"defaultFor\\\"]:\\n loader_info = info\\n break\\n if loader_info:\\n break\\n\\n if not loader_info:\\n raise ValueError(\\n \\\"No default loader found for any file in the directory\\\"\\n )\\n\\n else:\\n for info in self.loaders_info:\\n if info[\\\"name\\\"] == loader:\\n loader_info = info\\n break\\n\\n if not loader_info:\\n raise ValueError(f\\\"Loader {loader} not found in the loader info 
list\\\")\\n\\n loader_import = loader_info[\\\"import\\\"]\\n module_name, class_name = loader_import.rsplit(\\\".\\\", 1)\\n\\n try:\\n # Importe o loader dinamicamente\\n loader_module = __import__(module_name, fromlist=[class_name])\\n loader_instance = getattr(loader_module, class_name)\\n except ImportError as e:\\n raise ValueError(\\n f\\\"Loader {loader} could not be imported\\\\nLoader info:\\\\n{loader_info}\\\"\\n ) from e\\n\\n results = []\\n for file in files:\\n file_path = os.path.join(directory_path, file)\\n kwargs = loader_info.get(\\\"kwargs\\\", {})\\n result = loader_instance(file_path=file_path, **kwargs).load()\\n results.append(result)\\n self.status = results\\n return results\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"directory_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"directory_path\",\"display_name\":\"Directory Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"/Users/ogabrielluiz/Projects/langflow2/docs\"},\"loader\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"Automatic\",\"password\":false,\"options\":[\"Automatic\",\"Airbyte JSON (.jsonl)\",\"JSON (.json)\",\"BeautifulSoup4 HTML (.html, .htm)\",\"CSV (.csv)\",\"CoNLL-U (.conllu)\",\"EverNote (.enex)\",\"Facebook Chat (.json)\",\"Outlook Message (.msg)\",\"PyPDF (.pdf)\",\"Subtitle (.str)\",\"Text (.txt)\",\"Unstructured Email (.eml)\",\"Unstructured HTML (.html, .htm)\",\"Unstructured Markdown (.md)\",\"Unstructured PowerPoint (.pptx)\",\"Unstructured Word (.docx)\"],\"name\":\"loader\",\"display_name\":\"Loader\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true}},\"description\":\"Generic File Loader\",\"base_classes\":[\"Document\"],\"display_name\":\"Directory 
Loader\",\"custom_fields\":{\"directory_path\":null,\"loader\":null},\"output_types\":[\"Document\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-Ip6tG\"},\"id\":\"CustomComponent-Ip6tG\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-04T22:58:38.570303\",\"folder\":null,\"id\":\"5fe4debc-b6a7-45d4-a694-c02d8aa93b08\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Whisper Transcriber\",\"description\":\"Converts audio to text using OpenAI's Whisper.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Optional, List, Dict, Union\\nfrom langflow.field_typing import (\\n AgentExecutor,\\n BaseChatMemory,\\n BaseLanguageModel,\\n BaseLLM,\\n BaseLoader,\\n BaseMemory,\\n BaseOutputParser,\\n BasePromptTemplate,\\n BaseRetriever,\\n Callable,\\n Chain,\\n ChatPromptTemplate,\\n Data,\\n Document,\\n Embeddings,\\n NestedDict,\\n Object,\\n PromptTemplate,\\n TextSplitter,\\n Tool,\\n VectorStore,\\n)\\n\\nfrom openai import OpenAI\\nclass Component(CustomComponent):\\n display_name: str = \\\"Whisper Transcriber\\\"\\n description: str = \\\"Converts audio to text using OpenAI's Whisper.\\\"\\n\\n def build_config(self):\\n return {\\\"audio\\\":{\\\"field_type\\\":\\\"file\\\",\\\"suffixes\\\":[\\\".mp3\\\", \\\".mp4\\\", \\\".m4a\\\"]},\\\"OpenAIKey\\\":{\\\"field_type\\\":\\\"str\\\",\\\"password\\\":True}}\\n\\n def build(self, audio:str, OpenAIKey:str) -> str:\\n \\n # TODO: if output path, persist & load it instead\\n \\n client = OpenAI(api_key=OpenAIKey)\\n \\n audio_file= open(audio, \\\"rb\\\")\\n transcript = client.audio.transcriptions.create(\\n model=\\\"whisper-1\\\", \\n 
file=audio_file,\\n response_format=\\\"text\\\"\\n )\\n \\n \\n self.status = transcript\\n return transcript\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"OpenAIKey\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"OpenAIKey\",\"display_name\":\"OpenAIKey\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"audio\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"suffixes\":[\".mp3\",\".mp4\",\".m4a\"],\"password\":false,\"name\":\"audio\",\"display_name\":\"audio\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"file_path\":\"/Users/rodrigonader/Library/Caches/langflow/19b3e1c9-90bf-405f-898a-e982f47adf76/a3308ce7e9b10088fcd985aabb6d17b012c6b6e81ff8806356574474c9d10229.m4a\",\"value\":\"Audio Recording 2023-11-29 at 12.12.04.m4a\"}},\"description\":\"Converts audio to text using OpenAI's Whisper.\",\"base_classes\":[\"str\"],\"display_name\":\"Whisper Transcriber\",\"custom_fields\":{\"OpenAIKey\":null,\"audio\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-GJRrs\"},\"id\":\"CustomComponent-GJRrs\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-04T22:58:39.710502\",\"folder\":null,\"id\":\"bd9e911d-46bd-429f-aa75-dd740430695e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Youtube Conversation\",\"description\":\"Craft Meaningful Interactions, Generate 
Value.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":589,\"id\":\"CustomComponent-h0NSI\",\"type\":\"genericNode\",\"position\":{\"x\":714.9531293402755,\"y\":0.4994374160582993},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"import requests\\nfrom langflow import CustomComponent\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.schema import Document\\nfrom langchain.document_loaders import YoutubeLoader\\n\\n# Example usage:\\n\\nclass YourComponent(CustomComponent):\\n display_name: str = \\\"YouTube Transcript\\\"\\n description: str = \\\"YouTube transcripts refer to the written text versions of the spoken content in a video uploaded to the YouTube platform.\\\"\\n\\n def build_config(self):\\n return {\\\"url\\\": {\\\"multiline\\\": True, \\\"required\\\": True}}\\n\\n def build(self, url: str, llm: BaseLLM, dependencies: Document, language: str = \\\"en\\\") -> Document:\\n dependencies\\n response = requests.get(url)\\n loader = YoutubeLoader.from_youtube_url(url, add_video_info=True, language=[language])\\n result = loader.load()\\n self.repr_value = str(result)\\n return 
result\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"dependencies\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"dependencies\",\"display_name\":\"dependencies\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"language\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"en\",\"password\":false,\"name\":\"language\",\"display_name\":\"language\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"YouTube transcripts refer to the written text versions of the spoken content in a video uploaded to the YouTube platform.\",\"base_classes\":[\"Document\"],\"display_name\":\"YouTube 
Transcript\",\"custom_fields\":{\"dependencies\":null,\"language\":null,\"llm\":null,\"url\":null},\"output_types\":[\"Document\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-h0NSI\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":714.9531293402755,\"y\":0.4994374160582993}},{\"width\":384,\"height\":629,\"id\":\"ChatOpenAI-q61Y2\",\"type\":\"genericNode\",\"position\":{\"x\":18,\"y\":625.5521045590924},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false,\"value\":\"\"},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_k
wargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo-16k\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k-0613\",\"gpt-3.5-turbo-16k\",\"gpt-4-0613\",\"gpt-4-32k-0613\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-q61Y2\"},\"selected\":false,\"positionAbsolute\":{\"x\":18,\"y\":625.5521045590924},\"dragging\":false},{\"width\":384,\"height\":539,\"id\":\"Chroma-gVhy7\",\"type\":\"genericNode\",\"position\":{\"x\":2110.365967257855,\"y\":805.0149521868784},\"data\":{\"type\":\"Chroma\",\"node\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.Client\",\"list\":false},\"client_settings\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client_settings\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.config.Setting\",\"list\":true},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"chroma_server_cors_allow_origins\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Chroma Server CORS Allow 
Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"chroma_server_grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Chroma Server GRPC Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Chroma Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_http_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_http_port\",\"display_name\":\"Chroma Server HTTP Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_ssl_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Chroma Server SSL 
Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"collection_metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"collection_metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"persist\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"persist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"persist_directory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"persist_directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"_type\":\"Chroma\"},\"description\":\"Create a Chroma vectorstore from a raw 
documents.\",\"base_classes\":[\"VectorStore\",\"Chroma\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Chroma\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma\",\"beta\":false,\"error\":null},\"id\":\"Chroma-gVhy7\"},\"selected\":false,\"positionAbsolute\":{\"x\":2110.365967257855,\"y\":805.0149521868784},\"dragging\":false},{\"width\":384,\"height\":501,\"id\":\"RecursiveCharacterTextSplitter-1eaOX\",\"type\":\"genericNode\",\"position\":{\"x\":1409.2872773444874,\"y\":19.45456141103284},\"data\":{\"type\":\"RecursiveCharacterTextSplitter\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.utils.util import build_loader_repr_from_documents\\n\\n\\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Recursive Character Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": 'The characters to split on.\\\\nIf left empty defaults to [\\\"\\\\\\\\n\\\\\\\\n\\\", \\\"\\\\\\\\n\\\", \\\" \\\", \\\"\\\"].',\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n 
\\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n separators: Optional[list[str]] = None,\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n if separators == \\\"\\\":\\n separators = None\\n elif separators:\\n # check if the separators list has escaped characters\\n # if there are escaped characters, unescape them\\n separators = [x.encode().decode(\\\"unicode-escape\\\") for x in separators]\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n splitter = RecursiveCharacterTextSplitter(\\n separators=separators,\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n self.repr_value = build_loader_repr_from_documents(docs)\\n return docs\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"chunk_overlap\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"50\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between 
chunks.\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"400\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\",\"type\":\"int\",\"list\":false},\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\",\"type\":\"Document\",\"list\":true},\"separators\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"separators\",\"display_name\":\"Separators\",\"advanced\":false,\"dynamic\":false,\"info\":\"The characters to split on.\\nIf left empty defaults to [\\\"\\\\n\\\\n\\\", \\\"\\\\n\\\", \\\" \\\", \\\"\\\"].\",\"type\":\"str\",\"list\":true,\"value\":[\" \"]}},\"description\":\"Split text into chunks of a specified length.\",\"base_classes\":[\"Document\"],\"display_name\":\"Recursive Character Text 
Splitter\",\"custom_fields\":{\"chunk_overlap\":null,\"chunk_size\":null,\"documents\":null,\"separators\":null},\"output_types\":[\"RecursiveCharacterTextSplitter\"],\"documentation\":\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\",\"beta\":true,\"error\":null},\"id\":\"RecursiveCharacterTextSplitter-1eaOX\"},\"selected\":false,\"positionAbsolute\":{\"x\":1409.2872773444874,\"y\":19.45456141103284},\"dragging\":false},{\"width\":384,\"height\":367,\"id\":\"OpenAIEmbeddings-cduOO\",\"type\":\"genericNode\",\"position\":{\"x\":1405.1176670984544,\"y\":1029.459061269321},\"data\":{\"type\":\"OpenAIEmbeddings\",\"node\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"passwor
d\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{'Authorization': 'Bearer '}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API 
Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"Embeddings\",\"OpenAIEmbeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAIEmbeddings-cduOO\"},\"selected\":false,\"positionAbsolute\":{\"x\":1405.1176670984544,\"y\":1029.459061269321}},{\"width\":384,\"height\":339,\"id\":\"RetrievalQA-4PmTB\",\"type\":\"genericNode\",\"position\":{\"x\":2711.7786966415715,\"y\":709.4450828605463},\"data\":{\"type\":\"RetrievalQA\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"combine_documents_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCombineDocumentsChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"query\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"
type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"result\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"RetrievalQA\"},\"description\":\"Chain for question-answering against an 
index.\",\"base_classes\":[\"RetrievalQA\",\"Chain\",\"BaseRetrievalQA\",\"function\"],\"display_name\":\"RetrievalQA\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/vector_db_qa\",\"beta\":false,\"error\":null},\"id\":\"RetrievalQA-4PmTB\"},\"selected\":false,\"positionAbsolute\":{\"x\":2711.7786966415715,\"y\":709.4450828605463}},{\"width\":384,\"height\":333,\"id\":\"CombineDocsChain-yk0JF\",\"type\":\"genericNode\",\"position\":{\"x\":2125.6202053356537,\"y\":199.07708924409633},\"data\":{\"type\":\"CombineDocsChain\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"chain_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"load_qa_chain\"},\"description\":\"Load question answering chain.\",\"base_classes\":[\"BaseCombineDocumentsChain\",\"function\"],\"display_name\":\"CombineDocsChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"id\":\"CombineDocsChain-yk0JF\"},\"selected\":false,\"positionAbsolute\":{\"x\":2125.6202053356537,\"y\":199.07708924409633},\"dragging\":false},{\"width\":384,\"height\":417,\"id\":\"CustomComponent-y6Wg0\",\"type\":\"genericNode\",\"position\":{\"x\":39.400034102832365,\"y\":-499.03551482195707},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nimport subprocess\\n\\nfrom 
langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain.prompts import PromptTemplate\\nfrom langchain.schema import Document\\n\\nfrom typing import List\\n\\n\\nclass YourComponent(CustomComponent):\\n display_name: str = \\\"PIP Install\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n\\n def build(self, libs: List[str]) -> Document:\\n def install_package(package_name):\\n subprocess.check_call([\\\"pip\\\", \\\"install\\\", package_name])\\n for lib in libs:\\n install_package(lib)\\n return \\\"\\\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"libs\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"libs\",\"display_name\":\"libs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"requests\",\"pytube\"]}},\"description\":\"Create any custom component you want!\",\"base_classes\":[\"Document\"],\"display_name\":\"PIP Install\",\"custom_fields\":{\"libs\":null},\"output_types\":[],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-y6Wg0\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":39.400034102832365,\"y\":-499.03551482195707}}],\"edges\":[{\"source\":\"ChatOpenAI-q61Y2\",\"sourceHandle\":\"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}\",\"target\":\"CustomComponent-h0NSI\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œBaseLLMœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-q61Y2{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}-CustomComponent-h0NSI{œfieldNameœ:œllmœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œBaseLLMœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-q61Y2\"},\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"CustomComponent-h0NSI\",\"inputTypes\":null,\"type\":\"BaseLLM\"}}},{\"source\":\"CustomComponent-y6Wg0\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-y6Wg0œ}\",\"target\":\"CustomComponent-h0NSI\",\"targetHandle\":\"{œfieldNameœ:œdependenciesœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-y6Wg0{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-y6Wg0œ}-CustomComponent-h0NSI{œfieldNameœ:œdependenciesœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-y6Wg0\"},\"targetHandle\":{\"fieldName\":\"dependencies\",\"id\":\"CustomComponent-h0NSI\",\"inputTypes\":null,\"type\":\"Document\"}}},{\"source\":\"CustomComponent-h0NSI\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-h0NSIœ}\",\"target\":\"RecursiveCharacterTextSplitter-1eaOX\",\"targetHandle\":\"{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-h0NSI{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-h0NSIœ}-RecursiveCharacterTextSplitter-1eaOX{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-h0NSI\"},\"targetHandle\":{\"fieldName\":\"documents\",\"id\":\"RecursiveCharacterTextSplitter-1eaOX\",\"inputTypes\":null,\"type\":\"Document\"}},\"selected\":false},{\"source\":\"OpenAIEmbeddings-cduOO\",\"sourceHandle\":\"{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-cduOOœ}\",\"target\":\"Chroma-gVhy7\",\"targetHandle\":\"{œfieldNameœ:œembeddingœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-OpenAIEmbeddings-cduOO{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-cduOOœ}-Chroma-gVhy7{œfieldNameœ:œembeddingœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Embeddings\",\"OpenAIEmbeddings\"],\"dataType\":\"OpenAIEmbeddings\",\"id\":\"OpenAIEmbeddings-cduOO\"},\"targetHandle\":{\"fieldName\":\"embedding\",\"id\":\"Chroma-gVhy7\",\"inputTypes\":null,\"type\":\"Embeddings\"}}},{\"source\":\"RecursiveCharacterTextSplitter-1eaOX\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ}\",\"target\":\"Chroma-gVhy7\",\"targetHandle\":\"{œfieldNameœ:œdocumentsœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-RecursiveCharacterTextSplitter-1eaOX{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ}-Chroma-gVhy7{œfieldNameœ:œdocumentsœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"RecursiveCharacterTextSplitter\",\"id\":\"RecursiveCharacterTextSplitter-1eaOX\"},\"targetHandle\":{\"fieldName\":\"documents\",\"id\":\"Chroma-gVhy7\",\"inputTypes\":null,\"type\":\"Document\"}}},{\"source\":\"ChatOpenAI-q61Y2\",\"sourceHandle\":\"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}\",\"target\":\"CombineDocsChain-yk0JF\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-yk0JFœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-q61Y2{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}-CombineDocsChain-yk0JF{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-yk0JFœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-q61Y2\"},\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"CombineDocsChain-yk0JF\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"}}},{\"source\":\"CombineDocsChain-yk0JF\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œfunctionœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-yk0JFœ}\",\"target\":\"RetrievalQA-4PmTB\",\"targetHandle\":\"{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"
stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CombineDocsChain-yk0JF{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œfunctionœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-yk0JFœ}-RetrievalQA-4PmTB{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"BaseCombineDocumentsChain\",\"function\"],\"dataType\":\"CombineDocsChain\",\"id\":\"CombineDocsChain-yk0JF\"},\"targetHandle\":{\"fieldName\":\"combine_documents_chain\",\"id\":\"RetrievalQA-4PmTB\",\"inputTypes\":null,\"type\":\"BaseCombineDocumentsChain\"}}},{\"source\":\"Chroma-gVhy7\",\"sourceHandle\":\"{œbaseClassesœ:[œVectorStoreœ,œChromaœ,œBaseRetrieverœ,œVectorStoreRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-gVhy7œ}\",\"target\":\"RetrievalQA-4PmTB\",\"targetHandle\":\"{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-Chroma-gVhy7{œbaseClassesœ:[œVectorStoreœ,œChromaœ,œBaseRetrieverœ,œVectorStoreRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-gVhy7œ}-RetrievalQA-4PmTB{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"VectorStore\",\"Chroma\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"dataType\":\"Chroma\",\"id\":\"Chroma-gVhy7\"},\"targetHandle\":{\"fieldName\":\"retriever\",\"id\":\"RetrievalQA-4PmTB\",\"inputTypes\":null,\"type\":\"BaseRetriever\"}}}],\"viewport\":{\"x\":189.54413265004274,\"y\":259.89949657174975,\"zoom\":0.291027508374689}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:59:08.830269\",\"folder\":null,\"id\":\"bc7eb94b-6fc6-49cb-9b19-35347ba51afa\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Web Scraper - Content & Links\",\"description\":\"Fetch and 
parse text and links from a given URL.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":338,\"id\":\"LLMChain-H7PBy\",\"type\":\"genericNode\",\"position\":{\"x\":1682.494010974207,\"y\":275.5701585623092},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-H7PBy\"},\"selected\":false,\"positionAbsolute\":{\"x\":1682.494010974207,\"y\":275.5701585623092},\"dragging\":false},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-VSAdc\",\"type\":\"genericNode\",\"position\":{\"x\":1002.4263147022411,\"y\":-198.2305006886859},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required
\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false,\"value\":\"\"},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":
false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"0.1\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-VSAdc\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":1002.4263147022411,\"y\":-198.2305006886859}},{\"width\":384,\"height\":374,\"data\":{\"id\":\"GroupNode-WXPMk\",\"type\":\"PromptTemplate\",\"node\":{\"output_types\":[],\"display_name\":\"Web Scraper\",\"documentation\":\"\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"description\":\"Fetch and parse text and links from a given URL.\",\"template\":{\"code_CustomComponent-f6nOg\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return 
markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"code\"},\"display_name\":\"Code\"},\"ignore_links_CustomComponent-f6nOg\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"ignore_links\"}},\"code_CustomComponent-FGzJJ\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-FGzJJ\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-0XtUN\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return 
soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-0XtUN\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-ODIcp\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-ODIcp\",\"field\":\"code\"},\"display_name\":\"Code\"},\"output_parser_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"output_parser\"},\"display_name\":\"Output Parser\"},\"input_types_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_types\"},\"display_name\":\"Input 
Types\"},\"input_variables_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"],\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_variables\"},\"display_name\":\"Input Variables\"},\"partial_variables_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"partial_variables\"},\"display_name\":\"Partial Variables\"},\"template_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser query:\\n\\n{query}\\n\\nAnswer:\\n\",\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template\"},\"display_name\":\"Template\"},\"template_format_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template_format\"},\"display_name\":\"Template 
Format\"},\"validate_template_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"validate_template\"},\"display_name\":\"Validate Template\"},\"query_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"give me links\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"query\"}},\"code_CustomComponent-OACE0\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"code\"},\"display_name\":\"Code\"},\"url_CustomComponent-OACE0\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"url\"},\"value\":\"https://paperswithcode.com/\"},\"code_CustomComponent-whsQ5\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = 
\\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-whsQ5\",\"field\":\"code\"},\"display_name\":\"Code\"}},\"flow\":{\"data\":{\"nodes\":[{\"width\":384,\"height\":405,\"id\":\"CustomComponent-f6nOg\",\"type\":\"genericNode\",\"position\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"ignore_links\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Converts HTML content to Markdown 
format.\",\"base_classes\":[\"str\"],\"display_name\":\"HTML to Markdown\",\"custom_fields\":{\"html_content\":null,\"ignore_links\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-f6nOg\"},\"selected\":true,\"positionAbsolute\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"dragging\":false},{\"width\":384,\"height\":375,\"id\":\"CustomComponent-FGzJJ\",\"type\":\"genericNode\",\"position\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Data\",\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Fetches HTML content from a specified URL.\",\"base_classes\":[\"Data\"],\"display_name\":\"Fetch 
HTML\",\"custom_fields\":{\"url\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-FGzJJ\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-0XtUN\",\"type\":\"genericNode\",\"position\":{\"x\":214.00059169497604,\"y\":177.27071061129823},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Parses HTML content using Beautiful Soup.\",\"base_classes\":[\"Data\"],\"display_name\":\"Parse 
HTML\",\"custom_fields\":{\"html_content\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-0XtUN\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":214.00059169497604,\"y\":177.27071061129823}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-ODIcp\",\"type\":\"genericNode\",\"position\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"soup\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"soup\",\"display_name\":\"soup\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Extracts hyperlinks from parsed HTML content.\",\"base_classes\":[\"list\"],\"display_name\":\"Extract 
Hyperlinks\",\"custom_fields\":{\"soup\":null},\"output_types\":[\"list\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-ODIcp\"},\"selected\":true,\"positionAbsolute\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"dragging\":false},{\"width\":384,\"height\":657,\"id\":\"PromptTemplate-H9Udy\",\"type\":\"genericNode\",\"position\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser 
query:\\n\\n{query}\\n\\nAnswer:\\n\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_markdown\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_markdown\",\"display_name\":\"html_markdown\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_links\",\"display_name\":\"html_links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-H9Udy\"},\"selected\":true,\"positionAbsolute\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"dragging\":false},{\"width\":384,\"height\":347,\"id\":\"CustomComponent-OACE0\",\"type\":\"genericNode\",\"position\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":null,\"base_classes\":[\"str\"],\"display_name\":\"URL 
Input\",\"custom_fields\":{\"url\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-OACE0\"},\"selected\":true,\"positionAbsolute\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"dragging\":false},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-whsQ5\",\"type\":\"genericNode\",\"position\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"items\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"items\",\"display_name\":\"items\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"list\",\"list\":true}},\"description\":\"Converts a Python list into Markdown bullet points.\",\"base_classes\":[\"str\"],\"display_name\":\"List to 
Bullets\",\"custom_fields\":{\"items\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-whsQ5\"},\"selected\":true,\"positionAbsolute\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"dragging\":false}],\"edges\":[{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-f6nOg\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-f6nOg\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-f6nOg{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-0XtUN\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-0XtUN\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-0XtUN{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-0XtUN\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}\",\"target\":\"CustomComponent-ODIcp\",\"targetHandle\":\"{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"soup\",\"id\":\"CustomComponent-ODIcp\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-0XtUN\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-0XtUN{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}-CustomComponent-ODIcp{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"CustomComponent-FGzJJ\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"CustomComponent-FGzJJ\",\"inputTypes\":[\"Data\",\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-CustomComponent-FGzJJ{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-PromptTemplate-H9Udy{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-ODIcp\",\"sourceHandle\":\"{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}\",\"target\":\"CustomComponent-whsQ5\",\"targetHandle\":\"{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"items\",\"id\":\"CustomComponent-whsQ5\",\"inputTypes\":null,\"type\":\"list\"},\"sourceHandle\":{\"baseClasses\":[\"list\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-ODIcp\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-ODIcp{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}-CustomComponent-whsQ5{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"selected\":true},{\"source\":\"CustomComponent-whsQ5\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_links\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-whsQ5\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-whsQ5{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-f6nOg\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_markdown\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-f6nOg\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-f6nOg{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true}],\"viewport\":{\"x\":343.0346131188585,\"y\":341.89843948642147,\"zoom\":0.24148408223121196}},\"is_component\":false,\"name\":\"Fluffy Ptolemy\",\"description\":\"\",\"id\":\"W5oNW\"}}},\"id\":\"GroupNode-WXPMk\",\"position\":{\"x\":873.0737834322758,\"y\":598.8401015776466},\"type\":\"genericNode\",\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":873.0737834322758,\"y\":598.8401015776466}}],\"edges\":[{\"source\":\"ChatOpenAI-VSAdc\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VSAdcœ}\",\"target\":\"LLMChain-H7PBy\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-H7PBy\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-VSAdc\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-VSAdc{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VSAdcœ}-LLMChain-H7PBy{œfieldNameœ:œllmœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\"},{\"source\":\"GroupNode-WXPMk\",\"sourceHandle\":\"{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œGroupNode-WXPMkœ}\",\"target\":\"LLMChain-H7PBy\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-H7PBy\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"GroupNode-WXPMk\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-GroupNode-WXPMk{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œGroupNode-WXPMkœ}-LLMChain-H7PBy{œfieldNameœ:œpromptœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":-348.6161822813733,\"y\":121.40545291211492,\"zoom\":0.6801854262029781}},\"is_component\":false,\"updated_at\":\"2023-12-04T23:22:27.784647\",\"folder\":null,\"id\":\"efc3bf27-3cf1-4561-9a83-3df347572440\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sad Joliot\",\"description\":\"Navigate the Networks of 
Conversation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":338,\"id\":\"LLMChain-RQsU1\",\"type\":\"genericNode\",\"position\":{\"x\":514.4440482813261,\"y\":528.164086188516},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-RQsU1\"},\"selected\":false,\"positionAbsolute\":{\"x\":514.4440482813261,\"y\":528.164086188516}},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-NTTcv\",\"type\":\"genericNode\",\"position\":{\"x\":-221.33853765781578,\"y\":-35.48749923706055},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placehol
der\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\
":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"0.1\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-NTTcv\"},\"selected\":false,\"positionAbsolute\":{\"x\":-221.33853765781578,\"y\":-35.48749923706055}},{\"width\":384,\"height\":374,\"id\":\"PromptTemplate-VELMV\",\"type\":\"genericNode\",\"position\":{\"x\":-222,\"y\":682.560723386973},\"data\":{\"id\":\"PromptTemplate-VELMV\",\"type\":\"PromptTemplate\",\"node\":{\"output_types\":[],\"display_name\":\"Web Scraper\",\"documentation\":\"\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"description\":\"Fetch and parse text and links from a given URL.\",\"template\":{\"code_CustomComponent-f6nOg\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return 
markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"code\"},\"display_name\":\"Code\"},\"ignore_links_CustomComponent-f6nOg\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"ignore_links\"}},\"code_CustomComponent-FGzJJ\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-FGzJJ\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-0XtUN\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return 
soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-0XtUN\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-ODIcp\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-ODIcp\",\"field\":\"code\"},\"display_name\":\"Code\"},\"output_parser_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"output_parser\"},\"display_name\":\"Output Parser\"},\"input_types_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_types\"},\"display_name\":\"Input 
Types\"},\"input_variables_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"],\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_variables\"},\"display_name\":\"Input Variables\"},\"partial_variables_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"partial_variables\"},\"display_name\":\"Partial Variables\"},\"template_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser query:\\n\\n{query}\\n\\nAnswer:\\n\",\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template\"},\"display_name\":\"Template\"},\"template_format_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template_format\"},\"display_name\":\"Template 
Format\"},\"validate_template_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"validate_template\"},\"display_name\":\"Validate Template\"},\"query_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"query\"}},\"code_CustomComponent-OACE0\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"code\"},\"display_name\":\"Code\"},\"url_CustomComponent-OACE0\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"url\"},\"value\":\"https://paperswithcode.com/\"},\"code_CustomComponent-whsQ5\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a 
Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-whsQ5\",\"field\":\"code\"},\"display_name\":\"Code\"}},\"flow\":{\"data\":{\"nodes\":[{\"width\":384,\"height\":405,\"id\":\"CustomComponent-f6nOg\",\"type\":\"genericNode\",\"position\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"ignore_links\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Converts HTML content to Markdown 
format.\",\"base_classes\":[\"str\"],\"display_name\":\"HTML to Markdown\",\"custom_fields\":{\"html_content\":null,\"ignore_links\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-f6nOg\"},\"selected\":true,\"positionAbsolute\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"dragging\":false},{\"width\":384,\"height\":375,\"id\":\"CustomComponent-FGzJJ\",\"type\":\"genericNode\",\"position\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Data\",\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Fetches HTML content from a specified URL.\",\"base_classes\":[\"Data\"],\"display_name\":\"Fetch 
HTML\",\"custom_fields\":{\"url\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-FGzJJ\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-0XtUN\",\"type\":\"genericNode\",\"position\":{\"x\":214.00059169497604,\"y\":177.27071061129823},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Parses HTML content using Beautiful Soup.\",\"base_classes\":[\"Data\"],\"display_name\":\"Parse 
HTML\",\"custom_fields\":{\"html_content\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-0XtUN\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":214.00059169497604,\"y\":177.27071061129823}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-ODIcp\",\"type\":\"genericNode\",\"position\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"soup\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"soup\",\"display_name\":\"soup\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Extracts hyperlinks from parsed HTML content.\",\"base_classes\":[\"list\"],\"display_name\":\"Extract 
Hyperlinks\",\"custom_fields\":{\"soup\":null},\"output_types\":[\"list\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-ODIcp\"},\"selected\":true,\"positionAbsolute\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"dragging\":false},{\"width\":384,\"height\":657,\"id\":\"PromptTemplate-H9Udy\",\"type\":\"genericNode\",\"position\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser 
query:\\n\\n{query}\\n\\nAnswer:\\n\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_markdown\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_markdown\",\"display_name\":\"html_markdown\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_links\",\"display_name\":\"html_links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-H9Udy\"},\"selected\":true,\"positionAbsolute\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"dragging\":false},{\"width\":384,\"height\":347,\"id\":\"CustomComponent-OACE0\",\"type\":\"genericNode\",\"position\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":null,\"base_classes\":[\"str\"],\"display_name\":\"URL 
Input\",\"custom_fields\":{\"url\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-OACE0\"},\"selected\":true,\"positionAbsolute\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"dragging\":false},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-whsQ5\",\"type\":\"genericNode\",\"position\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"items\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"items\",\"display_name\":\"items\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"list\",\"list\":true}},\"description\":\"Converts a Python list into Markdown bullet points.\",\"base_classes\":[\"str\"],\"display_name\":\"List to 
Bullets\",\"custom_fields\":{\"items\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-whsQ5\"},\"selected\":true,\"positionAbsolute\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"dragging\":false}],\"edges\":[{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-f6nOg\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-f6nOg\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-f6nOg{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-0XtUN\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-0XtUN\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-0XtUN{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-0XtUN\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}\",\"target\":\"CustomComponent-ODIcp\",\"targetHandle\":\"{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"soup\",\"id\":\"CustomComponent-ODIcp\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-0XtUN\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-0XtUN{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}-CustomComponent-ODIcp{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"CustomComponent-FGzJJ\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"CustomComponent-FGzJJ\",\"inputTypes\":[\"Data\",\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-CustomComponent-FGzJJ{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-PromptTemplate-H9Udy{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-ODIcp\",\"sourceHandle\":\"{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}\",\"target\":\"CustomComponent-whsQ5\",\"targetHandle\":\"{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"items\",\"id\":\"CustomComponent-whsQ5\",\"inputTypes\":null,\"type\":\"list\"},\"sourceHandle\":{\"baseClasses\":[\"list\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-ODIcp\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-ODIcp{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}-CustomComponent-whsQ5{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"selected\":true},{\"source\":\"CustomComponent-whsQ5\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_links\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-whsQ5\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-whsQ5{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-f6nOg\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_markdown\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-f6nOg\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-f6nOg{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true}],\"viewport\":{\"x\":343.0346131188585,\"y\":341.89843948642147,\"zoom\":0.24148408223121196}},\"is_component\":false,\"name\":\"Fluffy Ptolemy\",\"description\":\"\",\"id\":\"W5oNW\"}}},\"selected\":false,\"positionAbsolute\":{\"x\":-222,\"y\":682.560723386973}}],\"edges\":[{\"source\":\"ChatOpenAI-NTTcv\",\"target\":\"LLMChain-RQsU1\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-NTTcvœ}\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"id\":\"reactflow__edge-ChatOpenAI-NTTcv{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-NTTcvœ}-LLMChain-RQsU1{œfieldNameœ:œllmœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-RQsU1\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-NTTcv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
\",\"animated\":false,\"selected\":false},{\"source\":\"PromptTemplate-VELMV\",\"target\":\"LLMChain-RQsU1\",\"sourceHandle\":\"{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-VELMVœ}\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"id\":\"reactflow__edge-PromptTemplate-VELMV{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-VELMVœ}-LLMChain-RQsU1{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-RQsU1\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-VELMV\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 \",\"animated\":false,\"selected\":false}],\"viewport\":{\"x\":298.29888454238517,\"y\":96.95765543775741,\"zoom\":0.5140569133280332}},\"is_component\":false,\"updated_at\":\"2023-12-04T23:23:08.584967\",\"folder\":null,\"id\":\"5df79f1d-f592-4d59-8c0f-9f3c2f6465b1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Pasteur\",\"description\":\"Nurture NLP Nodes Here.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":338,\"id\":\"LLMChain-cHel8\",\"type\":\"genericNode\",\"position\":{\"x\":547.3876889720291,\"y\":299.2057833881307},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n 
Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against 
LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-cHel8\"},\"selected\":false,\"positionAbsolute\":{\"x\":547.3876889720291,\"y\":299.2057833881307},\"dragging\":false},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-LA6y0\",\"type\":\"genericNode\",\"position\":{\"x\":-272.94405331278074,\"y\":-603.148171441675},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,
\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"0.1\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\
"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-LA6y0\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":-272.94405331278074,\"y\":-603.148171441675}},{\"width\":384,\"height\":404,\"id\":\"CustomComponent-ZNoRM\",\"type\":\"genericNode\",\"position\":{\"x\":-103.65741264289085,\"y\":134.62332404764658},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return 
markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"ignore_links\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Converts HTML content to Markdown format.\",\"base_classes\":[\"str\"],\"display_name\":\"HTML to Markdown\",\"custom_fields\":{\"html_content\":null,\"ignore_links\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-ZNoRM\"},\"selected\":false,\"positionAbsolute\":{\"x\":-103.65741264289085,\"y\":134.62332404764658},\"dragging\":false},{\"width\":384,\"height\":374,\"id\":\"CustomComponent-zNSTv\",\"type\":\"genericNode\",\"position\":{\"x\":-1233.3963469933499,\"y\":280.77850809220325},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return 
response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Data\",\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Fetches HTML content from a specified URL.\",\"base_classes\":[\"Data\"],\"display_name\":\"Fetch HTML\",\"custom_fields\":{\"url\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-zNSTv\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":-1233.3963469933499,\"y\":280.77850809220325}},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-Ihm2o\",\"type\":\"genericNode\",\"position\":{\"x\":-554.9404152016002,\"y\":683.6763775860567},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Parses 
HTML content using Beautiful Soup.\",\"base_classes\":[\"Data\"],\"display_name\":\"Parse HTML\",\"custom_fields\":{\"html_content\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-Ihm2o\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":-554.9404152016002,\"y\":683.6763775860567}},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-6ST9l\",\"type\":\"genericNode\",\"position\":{\"x\":76.10921262566495,\"y\":872.9491114949525},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"soup\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"soup\",\"display_name\":\"soup\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Extracts hyperlinks from parsed HTML content.\",\"base_classes\":[\"list\"],\"display_name\":\"Extract 
Hyperlinks\",\"custom_fields\":{\"soup\":null},\"output_types\":[\"list\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-6ST9l\"},\"selected\":false,\"positionAbsolute\":{\"x\":76.10921262566495,\"y\":872.9491114949525},\"dragging\":false},{\"width\":384,\"height\":654,\"id\":\"PromptTemplate-l35W1\",\"type\":\"genericNode\",\"position\":{\"x\":1461.4487148097069,\"y\":1090.896175351284},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false,\"display_name\":\"output_parser\"},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"display_name\":\"input_types\"},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"],\"display_name\":\"input_variables\"},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"display_name\":\"partial_variables\"},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from 
{url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser query:\\n\\n{query}\\n\\nAnswer:\\n\",\"display_name\":\"template\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false,\"display_name\":\"template_format\"},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"display_name\":\"validate_template\"},\"_type\":\"PromptTemplate\",\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_markdown\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_markdown\",\"display_name\":\"html_markdown\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_links\",\"display_name\":\"html_links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dy
namic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-l35W1\"},\"selected\":false,\"positionAbsolute\":{\"x\":1461.4487148097069,\"y\":1090.896175351284},\"dragging\":false},{\"width\":384,\"height\":346,\"id\":\"CustomComponent-iTLf4\",\"type\":\"genericNode\",\"position\":{\"x\":-1924.8908014123388,\"y\":704.5414990162741},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"https://paperswithcode.com/\"}},\"description\":null,\"base_classes\":[\"str\"],\"display_name\":\"URL 
Input\",\"custom_fields\":{\"url\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-iTLf4\"},\"selected\":false,\"positionAbsolute\":{\"x\":-1924.8908014123388,\"y\":704.5414990162741},\"dragging\":false},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-jWLUz\",\"type\":\"genericNode\",\"position\":{\"x\":627.6164007410564,\"y\":1201.2365384322047},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"items\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"items\",\"display_name\":\"items\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"list\",\"list\":true}},\"description\":\"Converts a Python list into Markdown bullet points.\",\"base_classes\":[\"str\"],\"display_name\":\"List to 
Bullets\",\"custom_fields\":{\"items\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-jWLUz\"},\"selected\":false,\"positionAbsolute\":{\"x\":627.6164007410564,\"y\":1201.2365384322047},\"dragging\":false}],\"edges\":[{\"source\":\"ChatOpenAI-LA6y0\",\"target\":\"LLMChain-cHel8\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-LA6y0œ}\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"id\":\"reactflow__edge-ChatOpenAI-LA6y0{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-LA6y0œ}-LLMChain-cHel8{œfieldNameœ:œllmœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-cHel8\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-LA6y0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"CustomComponent-zNSTv\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}\",\"target\":\"CustomComponent-ZNoRM\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-ZNoRMœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-ZNoRM\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-zNSTv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-zNSTv{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}-CustomComponent-ZNoRM{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-ZNoRMœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":false},{\"source\":\"CustomComponent-zNSTv\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}\",\"target\":\"CustomComponent-Ihm2o\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-Ihm2oœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-Ihm2o\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-zNSTv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-zNSTv{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}-CustomComponent-Ihm2o{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-Ihm2oœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":false},{\"source\":\"CustomComponent-Ihm2o\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-Ihm2oœ}\",\"target\":\"CustomComponent-6ST9l\",\"targetHandle\":\"{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-6ST9lœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"soup\",\"id\":\"CustomComponent-6ST9l\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-Ihm2o\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-Ihm2o{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-Ihm2oœ}-CustomComponent-6ST9l{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-6ST9lœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":false},{\"source\":\"CustomComponent-iTLf4\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}\",\"target\":\"CustomComponent-zNSTv\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œCustomComponent-zNSTvœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"CustomComponent-zNSTv\",\"inputTypes\":[\"Data\",\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-iTLf4\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-iTLf4{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}-CustomComponent-zNSTv{œfieldNameœ:œurlœ,œidœ:œCustomComponent-zNSTvœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"CustomComponent-iTLf4\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}\",\"target\":\"PromptTemplate-l35W1\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"PromptTemplate-l35W1\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-iTLf4\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-iTLf4{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}-PromptTemplate-l35W1{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"CustomComponent-6ST9l\",\"sourceHandle\":\"{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-6ST9lœ}\",\"target\":\"CustomComponent-jWLUz\",\"targetHandle\":\"{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-jWLUzœ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"items\",\"id\":\"CustomComponent-jWLUz\",\"inputTypes\":null,\"type\":\"list\"},\"sourceHandle\":{\"baseClasses\":[\"list\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-6ST9l\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-6ST9l{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-6ST9lœ}-CustomComponent-jWLUz{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-jWLUzœ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"selected\":false},{\"source\":\"CustomComponent-jWLUz\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-jWLUzœ}\",\"target\":\"PromptTemplate-l35W1\",\"targetHandle\":\"{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_links\",\"id\":\"PromptTemplate-l35W1\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-jWLUz\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-jWLUz{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-jWLUzœ}-PromptTemplate-l35W1{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"CustomComponent-ZNoRM\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ZNoRMœ}\",\"target\":\"PromptTemplate-l35W1\",\"targetHandle\":\"{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_markdown\",\"id\":\"PromptTemplate-l35W1\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-ZNoRM\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-ZNoRM{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ZNoRMœ}-PromptTemplate-l35W1{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"PromptTemplate-l35W1\",\"sourceHandle\":\"{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-l35W1œ}\",\"target\":\"LLMChain-cHel8\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-cHel8\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-l35W1\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-PromptTemplate-mJqEg{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-mJqEgœ}-LLMChain-cHel8{œfieldNameœ:œpromptœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":206.6159172940935,\"y\":79.94375811155385,\"zoom\":0.5140569133280333}},\"is_component\":false,\"updated_at\":\"2023-12-06T00:23:04.515144\",\"folder\":null,\"id\":\"ecfb377a-7e88-405d-8d66-7560735ce446\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Lovelace\",\"description\":\"Smart Chains, Smarter Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-04T23:43:25.925753\",\"folder\":null,\"id\":\"30e71767-6128-40e4-9a6d-b9197b679971\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Custom Component\",\"description\":\"Create any custom component you want!\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Custom Component\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Data) -> Data:\\n return 
param\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"param\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"param\",\"display_name\":\"Parameter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Create any custom component you want!\",\"base_classes\":[\"Data\"],\"display_name\":\"Custom Component\",\"custom_fields\":{\"param\":null},\"output_types\":[\"CustomComponent\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-IxJqc\"},\"id\":\"CustomComponent-IxJqc\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-05T22:08:27.080204\",\"folder\":null,\"id\":\"f3c56abb-96d8-4a09-80d3-f60181661b3c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazing Wilson\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-05T23:49:58.272677\",\"folder\":null,\"id\":\"324499a6-17a6-49de-96b1-b88955ed2c3d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nauseous Kowalevski\",\"description\":\"Create, Curate, Communicate with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:27.084387\",\"folder\":null,\"id\":\"e3168485-d31b-472a-907f-faf833bf7824\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Fermi\",\"description\":\"Craft Meaningful Interactions, Generate 
Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.134463\",\"folder\":null,\"id\":\"d0ecea5d-bcf9-4b8e-88f0-3c243d309336\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Friendly Ardinghelli\",\"description\":\"Smart Chains, Smarter Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.138184\",\"folder\":null,\"id\":\"a406912d-b0e2-4954-bef3-ee80c29eca3c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Easley\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.162908\",\"folder\":null,\"id\":\"57b32155-4c6e-4823-ad03-8dd09d8abc62\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Varahamihira\",\"description\":\"Create, Chain, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.135644\",\"folder\":null,\"id\":\"18daa0ff-2e5d-457a-95d7-710affec5c4d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jubilant Joliot\",\"description\":\"Language Architect at Work!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.139496\",\"folder\":null,\"id\":\"9255e938-280e-4ce7-91cd-8622126a7d02\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Adoring Carroll\",\"description\":\"Nurture NLP Nodes 
Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.162240\",\"folder\":null,\"id\":\"a7a680b9-30b1-4438-ac24-da597de443aa\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Berserk Herschel\",\"description\":\"Your Hub for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:46:02.593504\",\"folder\":null,\"id\":\"37a439b0-7b1d-45e3-b4fa-5c73669bae3b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Joliot\",\"description\":\"Conversational Cartography Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:04.845238\",\"folder\":null,\"id\":\"4d23fd0a-8ad5-46b9-bb99-eed4138e7426\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Happy Babbage\",\"description\":\"Flow into the Future of Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:41.899665\",\"folder\":null,\"id\":\"1c8ad5b9-5524-41fe-a762-52c75126b832\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Adoring Brown\",\"description\":\"Unlock the Power of AI in Your Business Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.702031\",\"folder\":null,\"id\":\"9d4c8e79-4904-4a51-a4bb-146c3d8db10e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Focused Mahavira\",\"description\":\"Design, Develop, 
Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.786331\",\"folder\":null,\"id\":\"4ba370d1-1f8e-4a23-99aa-99e2cd9b7cbf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mad Gates\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.838989\",\"folder\":null,\"id\":\"992c3cd3-1d79-4986-8c04-88a34130fa30\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Serene Mcclintock\",\"description\":\"Unlock the Power of AI in Your Business Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.932723\",\"folder\":null,\"id\":\"a65bfd13-44df-4090-aca0-5057e21e9997\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Berserk Feynman\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.931359\",\"folder\":null,\"id\":\"7a05dac5-c005-411d-9994-19d61e71ce78\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sprightly Perlman\",\"description\":\"Interactive Language Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:43.054687\",\"folder\":null,\"id\":\"6db24541-7211-48e6-a792-1a4a99a0ef90\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jolly Colden\",\"description\":\"Flow into the Future of 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:43.078384\",\"folder\":null,\"id\":\"13ac42e8-9124-4bf4-9ea2-28671ef2d9a4\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Kaku\",\"description\":\"The Power of Language at Your Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:50:33.156776\",\"folder\":null,\"id\":\"79262d75-5c62-4b14-b067-f4297f5c912f\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jovial Khayyam\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:51:13.295375\",\"folder\":null,\"id\":\"3b397e74-d8be-4728-9d6c-05f7c78106a7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Mccarthy\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:51:27.632685\",\"folder\":null,\"id\":\"13f4e1fd-45eb-4271-92fd-0d70a31c61d2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Lalande\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:51:54.628983\",\"folder\":null,\"id\":\"9cfd60d8-9311-47b0-b71b-f488f1940bc7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Romantic Mirzakhani\",\"description\":\"Innovation in Interaction with 
Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:52:52.622698\",\"folder\":null,\"id\":\"0d849403-0f75-455d-b4e4-0d622ee3305a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grinning Brown\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:53:06.056636\",\"folder\":null,\"id\":\"8643ba14-52d6-4d36-9981-5fd37de5dd76\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Kickass Watt\",\"description\":\"Transform Your Business with Smart Dialogues.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:53:23.338727\",\"folder\":null,\"id\":\"818d3066-bf08-4bf9-adcd-739f8abbfa5d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Bose\",\"description\":\"Unravel the Art of Articulation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:53:41.218861\",\"folder\":null,\"id\":\"28eff43e-0ecf-47bf-9851-f1492589978e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Optimistic Jennings\",\"description\":\"Create Powerful Connections, Boost Business Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:55:03.681106\",\"folder\":null,\"id\":\"68f59069-bc2a-464f-a983-4b61e32e01af\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pedantic Mirzakhani\",\"description\":\"Language Chainlink 
Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:55:31.761949\",\"folder\":null,\"id\":\"5d981c0e-81b3-44cc-a5be-8f55b92bfed5\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Peppy Murdock\",\"description\":\"Create Powerful Connections, Boost Business Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:56:45.548598\",\"folder\":null,\"id\":\"e812ad47-47e8-422b-b94c-84fd0263c9c8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Berserk Fermat\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:58:50.234270\",\"folder\":null,\"id\":\"82aaf449-e737-4699-9360-929ab6108dc7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tiny Albattani\",\"description\":\"Your Toolkit for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:59:53.946035\",\"folder\":null,\"id\":\"097cb080-274b-40ca-8dde-7900a949568a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cranky Kirch\",\"description\":\"Sculpting Language with Precision.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:00:51.745350\",\"folder\":null,\"id\":\"837d7b5f-8495-46a9-b00e-ad1b7ebb52f4\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Kowalevski\",\"description\":\"Empowering Language 
Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:02:53.336102\",\"folder\":null,\"id\":\"3ff079c2-d9f9-4da6-a22a-423fa35670ff\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Radiant Stallman\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:03:28.268357\",\"folder\":null,\"id\":\"c8b204dd-3d57-4bc8-aa13-1d559672e75b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grinning Swirles\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:03:51.194455\",\"folder\":null,\"id\":\"a097f083-5880-4cf7-986b-51898bc68e11\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dreamy Williams\",\"description\":\"Interactive Language Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:04:36.678251\",\"folder\":null,\"id\":\"d9585f63-ce76-42ce-87bc-8e69601ea5c6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Zany Hawking\",\"description\":\"Flow into the Future of Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:05:13.672479\",\"folder\":null,\"id\":\"612a01d5-8c8d-4cc1-8c54-35626b7f4a6d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazing Kilby\",\"description\":\"Your Toolkit for Text 
Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:10:37.047955\",\"folder\":null,\"id\":\"2d05a598-3cdf-452a-bd5d-b0e569e562e3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Insane Cori\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:18:12.426578\",\"folder\":null,\"id\":\"d1769a4a-c1e9-4d74-9273-1e76cfcf21f3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Compassionate Tesla\",\"description\":\"Graph Your Way to Great Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:18:52.806238\",\"folder\":null,\"id\":\"28c5d9dd-0466-4c80-9e58-b1e061cd358d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jubilant Goldstine\",\"description\":\"Harness the Power of Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:21:44.488510\",\"folder\":null,\"id\":\"b3c57e4f-28a1-4580-bf08-a9484bdd66e8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elegant Curie\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:23:47.610237\",\"folder\":null,\"id\":\"28fb024e-6ba1-4f5b-83b3-4742b3b8117c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Chandrasekhar\",\"description\":\"Maximize Impact with Intelligent 
Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:24:29.824530\",\"folder\":null,\"id\":\"d2b87cf7-9167-4030-8e9f-b8d9aa0cadee\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Nobel\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:26:51.210699\",\"folder\":null,\"id\":\"c2c9fbac-7d97-40c2-8055-dff608df9414\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elated Edison\",\"description\":\"Advanced NLP for Groundbreaking Business Solutions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:27:35.822482\",\"folder\":null,\"id\":\"a55561cd-4b25-473f-bde5-884cbabb0d24\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dazzling Lichterman\",\"description\":\"Building Linguistic Labyrinths.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:29:09.259381\",\"folder\":null,\"id\":\"9c6fe6d4-8311-4fc6-8b02-2a816d7c059d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Bhaskara\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:31:36.053452\",\"folder\":null,\"id\":\"ef6fc1ab-aff8-45a1-8cd5-bc8e38565e4d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Watt\",\"description\":\"Building Intelligent 
Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:32:26.765250\",\"folder\":null,\"id\":\"499b5351-9c09-4934-9f9d-a24be2fd8b24\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jolly Wien\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:33:23.648859\",\"folder\":null,\"id\":\"a57eda91-7f6e-410d-9990-385fe0c724f3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nostalgic Spence\",\"description\":\"Mapping Meaningful Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:34:22.576407\",\"folder\":null,\"id\":\"685f685e-8a1b-4b7e-9e21-6cdd72163c91\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Comical Fermat\",\"description\":\"Create, Connect, Converse.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:35:44.920540\",\"folder\":null,\"id\":\"3e061766-b834-4fa4-ba9c-b060925d9703\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Volta\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:36:33.001572\",\"folder\":null,\"id\":\"135f2fd9-e005-40bc-a9bf-c25107388415\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Davinci\",\"description\":\"Create Powerful Connections, Boost Business 
Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:37:16.208823\",\"folder\":null,\"id\":\"167cdb24-7e1c-475b-893a-cca2f125426c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hilarious Sinoussi\",\"description\":\"Language Chainlink Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:38:07.215401\",\"folder\":null,\"id\":\"48113cab-2c04-493f-8c2e-c8d067826aa2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Sagan\",\"description\":\"Connect the Dots, Craft Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:39:19.479656\",\"folder\":null,\"id\":\"28d292dc-e094-4ffe-a657-178892933267\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cranky Hoover\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:40:02.895859\",\"folder\":null,\"id\":\"0c9849b7-b2d6-4d0e-8334-abfb3ae183dd\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Mendeleev\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:40:27.867247\",\"folder\":null,\"id\":\"d78c52e9-1941-4555-9bb9-abd01f176705\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Dubinsky\",\"description\":\"Where Language Meets 
Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:41:23.177402\",\"folder\":null,\"id\":\"5277a04c-b5da-4597-aaa2-a6b66ea11d02\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Poitras\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:42:08.731865\",\"folder\":null,\"id\":\"b0d0c8de-4eea-484a-a068-b13e63f7e71c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pedantic Ptolemy\",\"description\":\"Crafting Conversations, One Node at a Time.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:42:25.658996\",\"folder\":null,\"id\":\"b6f155e2-03eb-4232-9bab-145463382fe9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Loving Cray\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:43:57.530112\",\"folder\":null,\"id\":\"b75c10ca-9ecb-432b-88ab-e1847e836e22\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Pasteur\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:44:58.979393\",\"folder\":null,\"id\":\"3710eccb-e7a7-41d5-9339-d9c301515d17\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Gates\",\"description\":\"The Power of Language at Your 
Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:45:42.744174\",\"folder\":null,\"id\":\"fdd2271e-92f6-4349-8c01-2ec0e9e73f13\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Determined Khorana\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:46:10.084684\",\"folder\":null,\"id\":\"88b49a97-0888-4fea-8d9b-6ac2cc6d158e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Booth\",\"description\":\"Design Dialogues with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:46:41.577750\",\"folder\":null,\"id\":\"629afe54-8796-45be-a570-e3ac79c60792\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jubilant Mendeleev\",\"description\":\"Chain the Words, Master Language!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:47:22.631243\",\"folder\":null,\"id\":\"7bf481b0-73fe-4f5b-a3d4-1263d9d8e827\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Clever Varahamihira\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:49:51.026960\",\"folder\":null,\"id\":\"df9e86b6-56c9-4848-9010-102615314766\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sleepy Stallman\",\"description\":\"Empowering Language 
Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:50:14.932236\",\"folder\":null,\"id\":\"3e66994c-9b7a-4b85-a917-65d1959d7352\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Modest Wozniak\",\"description\":\"Advanced NLP for Groundbreaking Business Solutions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:51:13.811894\",\"folder\":null,\"id\":\"d10f924a-5780-4255-9f41-3e102ae03e84\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hopeful Mirzakhani\",\"description\":\"Graph Your Way to Great Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:52:23.906908\",\"folder\":null,\"id\":\"f3378fa8-ccaf-4a3b-90d6-b8ab0c4e481f\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Awesome Joule\",\"description\":\"Design, Develop, Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:52:43.863440\",\"folder\":null,\"id\":\"deaa50e7-a8b1-46b1-856c-334ee781e1c2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Peppy Shockley\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:53:43.299699\",\"folder\":null,\"id\":\"c72eac2c-d924-40c6-a102-da524216d090\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Zany Dewey\",\"description\":\"Flow into the Future of 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:54:18.848827\",\"folder\":null,\"id\":\"d9425324-bb60-462e-b431-90a536f2bc76\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gloomy Joliot\",\"description\":\"Language Engineering Excellence.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:12.348608\",\"folder\":null,\"id\":\"621ba134-3fac-487c-98cd-96941439f1be\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Backstabbing Franklin\",\"description\":\"Craft Language Connections Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.491824\",\"folder\":null,\"id\":\"86ca9773-c7b7-4a1a-859a-6cbe0ddff206\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sharp Swartz\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.524414\",\"folder\":null,\"id\":\"da1c02b7-d608-4498-9946-7d02f55fa103\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Serene Volta\",\"description\":\"Connect the Dots, Craft Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.641432\",\"folder\":null,\"id\":\"8d5dd998-6b51-4f65-8331-086a7f3b11d7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Focused Lichterman\",\"description\":\"Crafting Dialogues that Drive Business 
Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.746120\",\"folder\":null,\"id\":\"942027d3-e2ea-48c6-8279-0a41b54e8862\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Einstein\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.746904\",\"folder\":null,\"id\":\"9e9b6298-1073-4297-8ecc-3c620b432e70\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Planck\",\"description\":\"Empowering Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.747420\",\"folder\":null,\"id\":\"7cd60c83-b797-4e60-af6d-cbc540657943\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Suspicious Zobell\",\"description\":\"Innovation in Interaction, Revolution in Revenue.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.749951\",\"folder\":null,\"id\":\"dab08306-9521-4e15-aa11-e6a6a4e210f8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Knuth\",\"description\":\"Nurture NLP Nodes Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:16.772119\",\"folder\":null,\"id\":\"c9149590-636a-44f5-aaae-45a4e78fe4df\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Wright\",\"description\":\"Unfolding Linguistic 
Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:52.954568\",\"folder\":null,\"id\":\"6b23b2ad-c07c-46f6-b9ad-268783d1712e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Vibrant Lalande\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.656156\",\"folder\":null,\"id\":\"ece3bcf6-a147-4559-862e-cacff9db5f48\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Happy Gauss\",\"description\":\"The Pinnacle of Prompt Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.717991\",\"folder\":null,\"id\":\"252b6021-ecad-4eaf-9e2f-106c4c89c496\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gigantic Rosalind\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.736261\",\"folder\":null,\"id\":\"b8cb6d8d-c0fb-4e8d-a46e-2c608dc8a714\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Adoring Hubble\",\"description\":\"Powerful Prompts, Perfectly Positioned.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.769546\",\"folder\":null,\"id\":\"553a67db-7225-474c-978e-8a40cde2bfb2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Mclean\",\"description\":\"Unlock the Power of AI in Your Business 
Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.981063\",\"folder\":null,\"id\":\"e0865007-4d80-4edf-87ab-9e8d2892d719\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Trusting Murdock\",\"description\":\"Uncover Business Opportunities with NLP.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.982286\",\"folder\":null,\"id\":\"1021cd20-66e0-4b81-9c79-bfe729774d20\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Perky Riemann\",\"description\":\"Powerful Prompts, Perfectly Positioned.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.983216\",\"folder\":null,\"id\":\"089074d3-8a1e-4d85-a59d-b4717090e4d3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Tesla\",\"description\":\"Language Models, Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:00:35.704142\",\"folder\":null,\"id\":\"beb49d88-255e-4db4-931b-4ab4358f1097\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Boyd\",\"description\":\"Smart Chains, Smarter Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:01:13.569507\",\"folder\":null,\"id\":\"75b5ab8d-e0c0-43cf-912b-8578550e198a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Babbage\",\"description\":\"Interactive Language 
Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:01:27.944868\",\"folder\":null,\"id\":\"503edd55-8f70-43e5-87fb-2324eaf62336\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Bose\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:02:40.122079\",\"folder\":null,\"id\":\"ae4f2992-1a23-4a43-bec6-68b823935762\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Coulomb\",\"description\":\"Craft Meaningful Interactions, Generate Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.263237\",\"folder\":null,\"id\":\"a2a464db-b02a-4440-ad9e-7b552ee6c027\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Drunk Newton\",\"description\":\"Craft Meaningful Interactions, Generate Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.883766\",\"folder\":null,\"id\":\"1a5d9af7-5a96-4035-a09c-e15741785828\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jovial Pasteur\",\"description\":\"Your Hub for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.933083\",\"folder\":null,\"id\":\"04b94873-0828-41dc-a850-fd4132c9b9f1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Playful Spence\",\"description\":\"Connect the Dots, Craft 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.967685\",\"folder\":null,\"id\":\"47003dc2-7884-48a3-aa66-e4185079f4d9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cheerful Noyce\",\"description\":\"Your Passport to Linguistic Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.983198\",\"folder\":null,\"id\":\"13769cb4-2e15-4d54-a28a-c97dc15db58c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Awesome Edison\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:16.018879\",\"folder\":null,\"id\":\"453dacde-6b10-406b-a5b3-f90f44be6899\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Upbeat Snyder\",\"description\":\"Powerful Prompts, Perfectly Positioned.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:16.205689\",\"folder\":null,\"id\":\"9544fac9-3002-47aa-86b9-102844fe9649\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Khorana\",\"description\":\"Chain the Words, Master Language!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:16.243434\",\"folder\":null,\"id\":\"90498ef6-34f9-45c8-8cd0-fe6a36a26f41\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elated Albattani\",\"description\":\"Interactive Language 
Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:05:07.775497\",\"folder\":null,\"id\":\"9577c416-8ce8-48f6-ad6d-ab2e003bb415\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Charming Goodall\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:07:52.139318\",\"folder\":null,\"id\":\"f10dc08e-b9c7-44b3-8837-b95aee2f6dbb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hilarious Ramanujan\",\"description\":\"Harness the Power of Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:08:22.448480\",\"folder\":null,\"id\":\"c864bd8c-67cd-465f-bf7d-7a35c7df37f3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Mclean\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:09:56.507826\",\"folder\":null,\"id\":\"f0c13b19-ae23-40e6-88d6-d135897ee100\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Hawking\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:10:27.169757\",\"folder\":null,\"id\":\"0afb91b4-8f41-4270-900a-f5de647d45ad\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gloomy Lovelace\",\"description\":\"Your Passport to Linguistic 
Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:11:02.292233\",\"folder\":null,\"id\":\"0f1e5dcf-8769-4157-b495-5f215b490107\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Fervent Kilby\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:11:46.960966\",\"folder\":null,\"id\":\"310d03d9-dd50-4946-9a27-38ee06906212\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Admiring Almeida\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:12:24.475101\",\"folder\":null,\"id\":\"3cbc8fc2-a86f-4177-9a1c-a833b2a24283\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elated Roentgen\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:06.753571\",\"folder\":null,\"id\":\"c508e922-29e9-4234-84ae-505c5bdf41c1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Perky Poitras\",\"description\":\"Chain the Words, Master Language!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.754605\",\"folder\":null,\"id\":\"1431df05-1b6f-41af-a063-a18d26a946ef\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Khayyam\",\"description\":\"Navigate the Linguistic Landscape, Discover 
Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.791627\",\"folder\":null,\"id\":\"344e03fb-fd49-4e87-be67-7dce04ba655b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Zealous Mayer\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.803889\",\"folder\":null,\"id\":\"8b3ff1cb-3a6c-46ee-b09a-0e9f9f13a8b9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tiny Ramanujan\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.822583\",\"folder\":null,\"id\":\"18961e76-f4b1-4968-926a-fed22cb04f69\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cheerful Franklin\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.854440\",\"folder\":null,\"id\":\"0e0ee854-ab46-4333-a848-2e1239a24334\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Serene Swirles\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.988932\",\"folder\":null,\"id\":\"cc7b8238-3d15-4f78-bd0c-8311691c9ff8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sleepy Swartz\",\"description\":\"Uncover Business Opportunities with 
NLP.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:08.028688\",\"folder\":null,\"id\":\"123369f9-c83c-4ed3-93b6-78ca29c271cf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cranky Kowalevski\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.097607\",\"folder\":null,\"id\":\"ed4b1490-e9e5-46bf-b337-166b48eaadd6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sprightly Golick\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.617772\",\"folder\":null,\"id\":\"9d1be311-c618-4e3e-aeb1-4161ab37850e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Boring Newton\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.646124\",\"folder\":null,\"id\":\"63a75f99-1745-40d3-9e27-3d19a143be45\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sprightly Noyce\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.685781\",\"folder\":null,\"id\":\"b418ca87-eb17-42e8-b789-3fcb0cab3ddb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Fluffy Fermat\",\"description\":\"Text Generation Meets Business 
Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.705984\",\"folder\":null,\"id\":\"93802b07-eee9-4a2b-8691-7d9a231bd67e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Stoic Payne\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.723990\",\"folder\":null,\"id\":\"d62df661-0ae5-4b41-a9fb-71cb2e46ad52\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Darwin\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.818343\",\"folder\":null,\"id\":\"d1f62248-415c-474a-bfa6-3509a528a33b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Focused Thompson\",\"description\":\"Transform Your Business with Smart Dialogues.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.925999\",\"folder\":null,\"id\":\"d2267176-f020-4c52-90a4-7f944d7c1749\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Admiring Dewey\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:07.400402\",\"folder\":null,\"id\":\"8cf1ac8d-d34d-4e8a-a9da-be44672e1dfb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Prickly Zuse\",\"description\":\"Where Language Meets 
Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.384611\",\"folder\":null,\"id\":\"bae3cb5b-0f1b-4e95-bf58-7eab38da3d73\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hungry Zuse\",\"description\":\"The Pinnacle of Prompt Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.436591\",\"folder\":null,\"id\":\"4dfafe3e-e9ef-405d-be72-550084411d69\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Khayyam\",\"description\":\"Design Dialogues with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.435958\",\"folder\":null,\"id\":\"e0f81215-dc55-4b5a-b8cf-6e2fcbf297a7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Big Mendel\",\"description\":\"Innovation in Interaction with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.470080\",\"folder\":null,\"id\":\"5022a71c-da47-4975-a4d0-6d2d9e760d3d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Inquisitive Poitras\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.484430\",\"folder\":null,\"id\":\"4f1ff9e3-3500-404c-80af-2010bc46cdcb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Joyous Jones\",\"description\":\"The Power of Language at Your 
Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.661306\",\"folder\":null,\"id\":\"77af3e47-c2cc-42f6-99e1-78589439a447\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Exuberant Khayyam\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.662877\",\"folder\":null,\"id\":\"6f3e2e56-b329-47e3-86cc-024c29203016\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dazzling Visvesvaraya\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.092917\",\"folder\":null,\"id\":\"449ac0ee-ee29-4a9f-9aff-fd6a86624457\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nostalgic Tesla\",\"description\":\"Nurture NLP Nodes Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.630382\",\"folder\":null,\"id\":\"a2136ed3-dc75-4a8f-ab34-6887ff955b23\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Noether\",\"description\":\"Language Models, Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.652058\",\"folder\":null,\"id\":\"fd1080f8-db07-481a-b2ba-60f67fcb20a6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dazzling Pasteur\",\"description\":\"Language Models, 
Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.688661\",\"folder\":null,\"id\":\"d534d5e1-92aa-4fb2-a795-7071c4feba47\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Comical Sinoussi\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.741385\",\"folder\":null,\"id\":\"85323170-c066-4d8f-acb0-1b142241389e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Ramanujan\",\"description\":\"Uncover Business Opportunities with NLP.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.790086\",\"folder\":null,\"id\":\"b0d18432-21ab-404b-acb6-57ef97353fad\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sick Joliot\",\"description\":\"Text Generation Meets Business 
Transformation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":442,\"id\":\"OpenAIEmbeddings-rVj1B\",\"type\":\"genericNode\",\"position\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187},\"data\":{\"type\":\"OpenAIEmbeddings\",\"node\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\
":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":true,\"name\":\"tiktoken_enabled\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAIEmbeddings-rVj1B\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187}}],\"edges\":[],\"viewport\":{\"x\":267.7156633365312,\"y\":716.9644817529361,\"zoom\":0.7169776240079139}},\"is_component\":false,\"updated_at\":\"2023-12-08T18:52:26.999440\",\"folder\":null,\"id\":\"be39958a-ef42-4fa8-8e54-b611e56b5c97\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Graceful Lumiere\",\"description\":\"Conversational Cartography Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:11.012069\",\"folder\":null,\"id\":\"f7dcecfd-533c-4f40-9dcb-f213962ed1a2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"aaaaa\",\"description\":\"Text Generation Meets Business 
Transformation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":442,\"id\":\"OpenAIEmbeddings-P6Z0D\",\"type\":\"genericNode\",\"position\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187},\"data\":{\"type\":\"OpenAIEmbeddings\",\"node\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\
":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":true,\"name\":\"tiktoken_enabled\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAIEmbeddings-P6Z0D\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187}}],\"edges\":[],\"viewport\":{\"x\":266.7156633365312,\"y\":657.9644817529361,\"zoom\":0.7169776240079139}},\"is_component\":false,\"updated_at\":\"2023-12-08T19:05:14.503144\",\"folder\":null,\"id\":\"c2411a20-57c6-44cc-a0d0-2c857453633d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Aryabhata\",\"description\":\"Design, Develop, Dialogize.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":459,\"id\":\"CustomComponent-Qhbd7\",\"type\":\"genericNode\",\"position\":{\"x\":-224.36198532285903,\"y\":-39.03047722134913},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nfrom openai import OpenAI\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"OpenAI STT english translator\\\"\\n description: str = \\\"Transcript and translate any audio to english\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"audio_path\\\":{\\\"display_name\\\":\\\"Audio path\\\",\\\"input_types\\\":[\\\"str\\\"]},\\\"openAI_key\\\":{\\\"display_name\\\":\\\"OpenAI key\\\",\\\"password\\\":True}}\\n\\n def build(self,audio_path:str,openAI_key:str) -> str:\\n client = OpenAI(api_key=openAI_key)\\n audio_file= open(audio_path, \\\"rb\\\")\\n transcript = client.audio.translations.create(\\n model=\\\"whisper-1\\\", \\n file=audio_file\\n )\\n self.status 
= transcript.text\\n return transcript.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"audio_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"audio_path\",\"display_name\":\"Audio path\",\"advanced\":false,\"input_types\":[\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"aaaaa\"},\"openAI_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openAI_key\",\"display_name\":\"OpenAI key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Transcript and translate any audio to english\",\"base_classes\":[\"str\"],\"display_name\":\"OpenAI STT english tra\",\"custom_fields\":{\"audio_path\":null,\"openAI_key\":null},\"output_types\":[\"str\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-Qhbd7\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-224.36198532285903,\"y\":-39.03047722134913}}],\"edges\":[],\"viewport\":{\"x\":433.8372868055629,\"y\":250.9611989970935,\"zoom\":0.8467453123625275}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:16:56.971879\",\"folder\":null,\"id\":\"122eee5d-9734-4e51-9da5-b39bead64a8d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Wing\",\"description\":\"Your Toolkit for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:23.227017\",\"folder\":null,\"id\":\"171d0063-6446-4c6a-8f5b-786a38951d44\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mad Boyd\",\"description\":\"Empowering Language 
Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.063781\",\"folder\":null,\"id\":\"3a7af50c-6555-4004-a86e-1ea37e477900\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Boring Dewey\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.065404\",\"folder\":null,\"id\":\"dc4b4235-a550-41e2-9ddb-bcb352a1bc03\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nauseous Carroll\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.087501\",\"folder\":null,\"id\":\"9550e2bf-db7a-41f5-84e5-177a181bbeda\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Engelbart\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.112543\",\"folder\":null,\"id\":\"6a3a24bb-01cb-4d8e-8d17-0dc92d257322\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sassy Khayyam\",\"description\":\"Innovation in Interaction, Revolution in Revenue.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.147631\",\"folder\":null,\"id\":\"8d372f5e-ca12-4ea8-a1a1-8846afa72692\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Bell\",\"description\":\"Connect the Dots, Craft 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.212921\",\"folder\":null,\"id\":\"800f8785-0f41-4db3-aef8-9e3de5250526\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sleepy Bassi\",\"description\":\"Unleashing Business Potential through Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.395729\",\"folder\":null,\"id\":\"4589607a-065b-4a8f-ba52-5045d7b04086\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Volhard\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.263971\",\"folder\":null,\"id\":\"b2440ed8-44fa-4684-adf7-b5e84bff6577\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Ecstatic Poincare\",\"description\":\"Your Passport to Linguistic Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.377270\",\"folder\":null,\"id\":\"b996f514-e0b8-432f-969b-7276630a8f4b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Zuse\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.406385\",\"folder\":null,\"id\":\"6e2e9c12-0afc-499e-acdd-adf4b5f7d4fc\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Big Hopper\",\"description\":\"Conversation Catalyst 
Engine.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.413014\",\"folder\":null,\"id\":\"e020f1a5-aa12-45e7-ba50-6eb64a735e60\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Jang\",\"description\":\"Conversation Catalyst Engine.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.433045\",\"folder\":null,\"id\":\"ee58f892-b7b2-408e-b4b9-9d862fc315aa\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Ohm\",\"description\":\"Promptly Ingenious!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.403404\",\"folder\":null,\"id\":\"023a1fc3-8807-4167-b6b2-f4e5daf036f1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Degrasse\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.411655\",\"folder\":null,\"id\":\"085f106f-c1e9-4a0f-ba31-d2fafe685d9c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Exuberant Volta\",\"description\":\"Catalyzing Business Growth through Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.408697\",\"folder\":null,\"id\":\"14481bb5-1353-452f-9359-d38c9419d79c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Bose\",\"description\":\"Conversational Cartography 
Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:21.774940\",\"folder\":null,\"id\":\"e9316292-4ee1-441b-8327-0b09a7831fe9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Thirsty Easley\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.416556\",\"folder\":null,\"id\":\"668806ba-3efa-44de-aeb7-4ac082ba9172\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Joyous Mestorf\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.484048\",\"folder\":null,\"id\":\"3fc0a371-aada-4450-9d17-33d3cc05c870\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Playful Franklin\",\"description\":\"Empowering Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.509095\",\"folder\":null,\"id\":\"af967c98-5f08-4ee2-b1ce-6c16b4f9ebe2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Bhaskara\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.508465\",\"folder\":null,\"id\":\"758c4164-b521-45d0-a15f-d49480e312eb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Funky Edison\",\"description\":\"Unravel the Art of 
Articulation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.602296\",\"folder\":null,\"id\":\"f9d3d30f-8859-433f-bafc-ccf1a7196e35\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Ride\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.604061\",\"folder\":null,\"id\":\"0142bca5-eb61-42ed-9917-70c4c0f54eb0\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Noyce\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.603113\",\"folder\":null,\"id\":\"0b63d036-4669-4ceb-8ea4-34035340df77\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Bhabha\",\"description\":\"Language Engineering Excellence.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.345767\",\"folder\":null,\"id\":\"8b9a66d4-a924-4b84-a2b5-5dd0645ac07a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Thirsty Zobell\",\"description\":\"Unleashing Business Potential through Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.722319\",\"folder\":null,\"id\":\"c912fd6b-b32d-409f-a0e5-c6249b066429\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Fervent Shaw\",\"description\":\"Language Architect at 
Work!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.750779\",\"folder\":null,\"id\":\"9d755cd4-c652-43e0-a68d-75a5475ce7a3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Giggly Newton\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.786602\",\"folder\":null,\"id\":\"0d3af7de-1ada-4c43-a69f-1bfad370ccfc\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Modest Yalow\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.792245\",\"folder\":null,\"id\":\"b6444376-4162-436b-8b40-f5a6afc850db\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sad Bhabha\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.890349\",\"folder\":null,\"id\":\"d90af439-fb34-4d27-98f2-06f7f9a9ed8c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Spirited Hoover\",\"description\":\"The Pinnacle of Prompt Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.905750\",\"folder\":null,\"id\":\"31597ce2-de3c-490b-9ead-3f702f63cfd9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Trusting Davinci\",\"description\":\"Design, Develop, 
Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:08.001400\",\"folder\":null,\"id\":\"b43c63e9-a257-4a53-8acc-049e13706ac2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"23\",\"description\":\"23\",\"data\":{\"nodes\":[{\"width\":384,\"height\":467,\"id\":\"PromptTemplate-K7xiS\",\"type\":\"genericNode\",\"position\":{\"x\":-658.2250903773149,\"y\":809.352046606987},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"dasdas\",\"dasdasd\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"{dasdas}\\n{dasdasd}\\n\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false}
,\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"dasdas\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"dasdas\",\"display_name\":\"dasdas\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"dasdasd\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"dasdasd\",\"display_name\":\"dasdasd\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"PromptTemplate\",\"BasePromptTemplate\",\"StringPromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"dasdas\",\"dasdasd\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-K7xiS\"},\"selected\":false,\"positionAbsolute\":{\"x\":-658.2250903773149,\"y\":809.352046606987},\"dragging\":false},{\"width\":384,\"height\":366,\"id\":\"AirbyteJSONLoader-DXfcM\",\"type\":\"genericNode\",\"position\":{\"x\":-1110.8267574563533,\"y\":569.1107380883907},\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"metadata\
":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-DXfcM\"},\"selected\":false,\"positionAbsolute\":{\"x\":-1110.8267574563533,\"y\":569.1107380883907},\"dragging\":false},{\"width\":384,\"height\":376,\"id\":\"PromptRunner-ckWMH\",\"type\":\"genericNode\",\"position\":{\"x\":-1149.4746387825978,\"y\":992.3970573758324},\"data\":{\"type\":\"PromptRunner\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.prompts import PromptTemplate\\nfrom langchain.schema import Document\\n\\n\\nclass PromptRunner(CustomComponent):\\n display_name: str = \\\"Prompt Runner\\\"\\n description: str = \\\"Run a Chain with the given PromptTemplate\\\"\\n beta: bool = True\\n field_config = {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt Template\\\",\\n \\\"info\\\": \\\"Make sure the prompt has all variables filled.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}) -> Document:\\n chain = prompt | llm\\n # The input is an empty dict because the prompt is already filled\\n result = chain.invoke(input=inputs)\\n if hasattr(result, \\\"content\\\"):\\n 
result = result.content\\n self.repr_value = result\\n return Document(page_content=str(result))\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"inputs\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"inputs\",\"display_name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt Template\",\"advanced\":false,\"dynamic\":false,\"info\":\"Make sure the prompt has all variables filled.\",\"type\":\"PromptTemplate\",\"list\":false}},\"description\":\"Run a Chain with the given PromptTemplate\",\"base_classes\":[\"Document\"],\"display_name\":\"Prompt 
Runner\",\"custom_fields\":{\"inputs\":null,\"llm\":null,\"prompt\":null},\"output_types\":[\"PromptRunner\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"PromptRunner-ckWMH\"},\"selected\":false,\"positionAbsolute\":{\"x\":-1149.4746387825978,\"y\":992.3970573758324},\"dragging\":false}],\"edges\":[{\"source\":\"PromptRunner-ckWMH\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œPromptRunnerœ,œidœ:œPromptRunner-ckWMHœ}\",\"target\":\"PromptTemplate-K7xiS\",\"targetHandle\":\"{œfieldNameœ:œdasdasdœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"dasdasd\",\"id\":\"PromptTemplate-K7xiS\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"PromptRunner\",\"id\":\"PromptRunner-ckWMH\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-PromptRunner-ckWMH{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œPromptRunnerœ,œidœ:œPromptRunner-ckWMHœ}-PromptTemplate-K7xiS{œfieldNameœ:œdasdasdœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\"},{\"source\":\"AirbyteJSONLoader-DXfcM\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œAirbyteJSONLoaderœ,œidœ:œAirbyteJSONLoader-DXfcMœ}\",\"target\":\"PromptTemplate-K7xiS\",\"targetHandle\":\"{œfieldNameœ:œdasdasœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"dasdas\",\"id\":\"PromptTemplate-K7xiS\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"AirbyteJSONLoader\",\"id\":\"AirbyteJSONLoader-DXfcM\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-AirbyteJSONLoader-DXfcM{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œAirbyteJSONLoaderœ,œidœ:œAirbyteJSONLoader-DXfcMœ}-PromptTemplate-K7xiS{œfieldNameœ:œdasdasœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\"}],\"viewport\":{\"x\":721.09842496776,\"y\":-303.59762799439625,\"zoom\":0.6417129487814537}},\"is_component\":false,\"updated_at\":\"2023-12-08T22:52:14.560323\",\"folder\":null,\"id\":\"8533c46e-21fd-4b92-b68e-1086ea86c72d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Stonebraker\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:26:42.332360\",\"folder\":null,\"id\":\"92bc0875-4a73-44f2-9410-3b8342e404bf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Metaphor Search (1)\",\"description\":\"Search in Metaphor with a custom string.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\nfrom metaphor_python import Metaphor\\nimport json\\n\\nfrom typing import List\\nfrom langflow.field_typing import Data\\n\\nclass MetaphorSearch(CustomComponent):\\n display_name: str = \\\"Metaphor Search\\\"\\n description: str = \\\"Search in Metaphor with a custom string.\\\"\\n beta = True\\n \\n def build_config(self):\\n return {\\n \\\"metaphor_client\\\": {\\\"display_name\\\": \\\"Metaphor Wrapper\\\"}, \\n \\\"search_num_results\\\": {\\\"display_name\\\": \\\"Number of 
Results (per domain)\\\"},\\n \\\"include_domains\\\": {\\\"display_name\\\": \\\"Include Domains\\\", \\\"is_list\\\": True},\\n \\\"start_date\\\": {\\\"display_name\\\": \\\"Start Date\\\"},\\n \\\"use_autoprompt\\\": {\\\"display_name\\\": \\\"Use Autoprompt\\\", \\\"type\\\": \\\"boolean\\\"},\\n \\\"search_type\\\": {\\\"display_name\\\": \\\"Search Type\\\", \\\"options\\\": [\\\"neural\\\", \\\"keyword\\\"]},\\n \\\"start_date\\\": {\\\"input_types\\\": [\\\"Data\\\"]}\\n }\\n\\n def build(\\n self,\\n methaphor_client: Data,\\n query: str,\\n search_type: str='keyword',\\n search_num_results: int = 5,\\n include_domains: List[str]= [\\\"youtube.com\\\"],\\n use_autoprompt: bool = False,\\n start_date: str=\\\"2023-01-01\\\",\\n \\n ) -> Data:\\n \\n results = []\\n for domain in include_domains:\\n response = methaphor_client.search(\\n query,\\n num_results=int(search_num_results),\\n include_domains=[domain],\\n use_autoprompt=use_autoprompt,\\n type=search_type,\\n # start_crawl_date=start_date,\\n start_published_date=start_date\\n )\\n results.extend(response.results)\\n \\n self.repr_value = results\\n\\n return results\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"include_domains\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[\"yout\"],\"password\":false,\"name\":\"include_domains\",\"display_name\":\"Include 
Domains\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"methaphor_client\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"methaphor_client\",\"display_name\":\"methaphor_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"query\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"pasteldasdasdas\"},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"Number of Results (per domain)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"keyword\",\"password\":false,\"options\":[\"neural\",\"keyword\"],\"name\":\"search_type\",\"display_name\":\"Search Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"start_date\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"start_date\",\"display_name\":\"start_date\",\"advanced\":false,\"input_types\":[\"Data\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"Use Autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Search in Metaphor with a custom string.\",\"base_classes\":[\"Data\"],\"display_name\":\"Metaphor 
Search\",\"custom_fields\":{\"include_domains\":null,\"methaphor_client\":null,\"query\":null,\"search_num_results\":null,\"search_type\":null,\"start_date\":null,\"use_autoprompt\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-dMB5d\"},\"id\":\"CustomComponent-dMB5d\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:26:43.668665\",\"folder\":null,\"id\":\"912265df-9b87-4b30-a585-1ca59b944391\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Metaphor Search (2)\",\"description\":\"Search in Metaphor with a custom string.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\nfrom metaphor_python import Metaphor\\nimport json\\n\\nfrom typing import List\\nfrom langflow.field_typing import Data\\n\\nclass MetaphorSearch(CustomComponent):\\n display_name: str = \\\"Metaphor Search\\\"\\n description: str = \\\"Search in Metaphor with a custom string.\\\"\\n beta = True\\n \\n def build_config(self):\\n return {\\n \\\"metaphor_client\\\": {\\\"display_name\\\": \\\"Metaphor Wrapper\\\"}, \\n \\\"search_num_results\\\": {\\\"display_name\\\": \\\"Number of Results (per domain)\\\"},\\n \\\"include_domains\\\": {\\\"display_name\\\": \\\"Include Domains\\\", \\\"is_list\\\": True},\\n \\\"start_date\\\": {\\\"display_name\\\": \\\"Start Date\\\"},\\n \\\"use_autoprompt\\\": {\\\"display_name\\\": \\\"Use Autoprompt\\\", \\\"type\\\": \\\"boolean\\\"},\\n \\\"search_type\\\": 
{\\\"display_name\\\": \\\"Search Type\\\", \\\"options\\\": [\\\"neural\\\", \\\"keyword\\\"]},\\n \\\"start_date\\\": {\\\"input_types\\\": [\\\"Data\\\"]}\\n }\\n\\n def build(\\n self,\\n methaphor_client: Data,\\n query: str,\\n search_type: str='keyword',\\n search_num_results: int = 5,\\n include_domains: List[str]= [\\\"youtube.com\\\"],\\n use_autoprompt: bool = False,\\n start_date: str=\\\"2023-01-01\\\",\\n \\n ) -> Data:\\n \\n results = []\\n for domain in include_domains:\\n response = methaphor_client.search(\\n query,\\n num_results=int(search_num_results),\\n include_domains=[domain],\\n use_autoprompt=use_autoprompt,\\n type=search_type,\\n # start_crawl_date=start_date,\\n start_published_date=start_date\\n )\\n results.extend(response.results)\\n \\n self.repr_value = results\\n\\n return results\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"include_domains\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[\"yout\"],\"password\":false,\"name\":\"include_domains\",\"display_name\":\"Include Domains\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"methaphor_client\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"methaphor_client\",\"display_name\":\"methaphor_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"query\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"pasteldasdasdas\"},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"Number of Results (per 
domain)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"keyword\",\"password\":false,\"options\":[\"neural\",\"keyword\"],\"name\":\"search_type\",\"display_name\":\"Search Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"start_date\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"start_date\",\"display_name\":\"start_date\",\"advanced\":false,\"input_types\":[\"Data\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"Use Autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Search in Metaphor with a custom string.\",\"base_classes\":[\"Data\"],\"display_name\":\"Metaphor Search\",\"custom_fields\":{\"include_domains\":null,\"methaphor_client\":null,\"query\":null,\"search_num_results\":null,\"search_type\":null,\"start_date\":null,\"use_autoprompt\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-ipifC\"},\"id\":\"CustomComponent-ipifC\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:26:49.799612\",\"folder\":null,\"id\":\"ca83ee08-2f93-427d-9897-80fef6dd5447\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Metaphor Search\",\"description\":\"Search in Metaphor with a custom 
string.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\nfrom metaphor_python import Metaphor\\nimport json\\n\\nfrom typing import List\\nfrom langflow.field_typing import Data\\n\\nclass MetaphorSearch(CustomComponent):\\n display_name: str = \\\"Metaphor Search\\\"\\n description: str = \\\"Search in Metaphor with a custom string.\\\"\\n beta = True\\n \\n def build_config(self):\\n return {\\n \\\"metaphor_client\\\": {\\\"display_name\\\": \\\"Metaphor Wrapper\\\"}, \\n \\\"search_num_results\\\": {\\\"display_name\\\": \\\"Number of Results (per domain)\\\"},\\n \\\"include_domains\\\": {\\\"display_name\\\": \\\"Include Domains\\\", \\\"is_list\\\": True},\\n \\\"start_date\\\": {\\\"display_name\\\": \\\"Start Date\\\"},\\n \\\"use_autoprompt\\\": {\\\"display_name\\\": \\\"Use Autoprompt\\\", \\\"type\\\": \\\"boolean\\\"},\\n \\\"search_type\\\": {\\\"display_name\\\": \\\"Search Type\\\", \\\"options\\\": [\\\"neural\\\", \\\"keyword\\\"]},\\n \\\"start_date\\\": {\\\"input_types\\\": [\\\"Data\\\"]}\\n }\\n\\n def build(\\n self,\\n methaphor_client: Data,\\n query: str,\\n search_type: str='keyword',\\n search_num_results: int = 5,\\n include_domains: List[str]= [\\\"youtube.com\\\"],\\n use_autoprompt: bool = False,\\n start_date: str=\\\"2023-01-01\\\",\\n \\n ) -> Data:\\n \\n results = []\\n for domain in include_domains:\\n response = methaphor_client.search(\\n query,\\n num_results=int(search_num_results),\\n include_domains=[domain],\\n use_autoprompt=use_autoprompt,\\n type=search_type,\\n # start_crawl_date=start_date,\\n 
start_published_date=start_date\\n )\\n results.extend(response.results)\\n \\n self.repr_value = results\\n\\n return results\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"include_domains\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[\"yout\"],\"password\":false,\"name\":\"include_domains\",\"display_name\":\"Include Domains\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"methaphor_client\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"methaphor_client\",\"display_name\":\"methaphor_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"query\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"pasteldasdasdas\"},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"Number of Results (per domain)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"keyword\",\"password\":false,\"options\":[\"neural\",\"keyword\"],\"name\":\"search_type\",\"display_name\":\"Search 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"start_date\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"start_date\",\"display_name\":\"start_date\",\"advanced\":false,\"input_types\":[\"Data\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"Use Autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Search in Metaphor with a custom string.\",\"base_classes\":[\"Data\"],\"display_name\":\"Metaphor Search\",\"custom_fields\":{\"include_domains\":null,\"methaphor_client\":null,\"query\":null,\"search_num_results\":null,\"search_type\":null,\"start_date\":null,\"use_autoprompt\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-Y4qL7\"},\"id\":\"CustomComponent-Y4qL7\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:26:53.719960\",\"folder\":null,\"id\":\"5847602b-769a-4a82-82e8-a70f54a59929\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Williams\",\"description\":\"Conversational Cartography Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:27:45.691254\",\"folder\":null,\"id\":\"0e3bdba9-127a-4399-81a4-2865b70a4a47\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Shared Component\",\"description\":\"Conversational Cartography 
Unlocked.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":328,\"id\":\"CSVAgent-TK9Ea\",\"type\":\"genericNode\",\"position\":{\"x\":251.25514772667083,\"y\":160.7424529887874},\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null},\"id\":\"CSVAgent-TK9Ea\"},\"positionAbsolute\":{\"x\":251.25514772667083,\"y\":160.7424529887874}}],\"edges\":[],\"viewport\":{\"x\":104.85568116317398,\"y\":88.26375874183478,\"zoom\":0.7169776240079145}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:28:34.119070\",\"folder\":null,\"id\":\"29c1a247-47b0-457b-8666-7c0a67dc72b0\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"teste 
cristhian\",\"description\":\"11111\",\"data\":{\"nodes\":[{\"width\":384,\"height\":328,\"id\":\"CSVAgent-P5wrB\",\"type\":\"genericNode\",\"position\":{\"x\":251.25514772667083,\"y\":160.7424529887874},\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null},\"id\":\"CSVAgent-P5wrB\"},\"positionAbsolute\":{\"x\":251.25514772667083,\"y\":160.7424529887874}},{\"width\":384,\"height\":626,\"id\":\"OpenAI-zpihD\",\"type\":\"genericNode\",\"position\":{\"x\":-56.983202536768374,\"y\":61.715652677908665},\"data\":{\"type\":\"OpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"
async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":20,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"best_of\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"m
ultiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"logit_bias\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-babbage-001\",\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"Ope
nAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false,\"value\":\"dasdasdasdsadasdas\"},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":tr
ue},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"1.4\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language models.\",\"base_classes\":[\"OpenAI\",\"BaseLanguageModel\",\"BaseLLM\",\"BaseOpenAI\"],\"display_name\":\"OpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAI-zpihD\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-56.983202536768374,\"y\":61.715652677908665}}],\"edges\":[],\"viewport\":{\"x\":104.85568116317398,\"y\":88.26375874183478,\"zoom\":0.7169776240079145}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:40:48.453096\",\"folder\":null,\"id\":\"2e59d013-2acb-49a6-915b-9231a7e6eb58\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVAgent (1)\",\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVAgent-jsHqy\"},\"id\":\"CSVAgent-jsHqy\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:31:17.317322\",\"folder\":null,\"id\":\"ab1034a9-9b5b-4c65-bf6e-9f098a403942\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Suspicious Wilsonfasdfsd\",\"description\":\"Building Linguistic Labyrinths.fasdfads\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T14:43:45.298121\",\"folder\":null,\"id\":\"7d91c0c5-fba6-4c60-b4d1-11d430ef357a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (1)\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-pAHh6\"},\"id\":\"AirbyteJSONLoader-pAHh6\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:47:58.573137\",\"folder\":null,\"id\":\"f0cc4292-97cc-4748-803d-949e30dcf661\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"ff2\":\"d3bbd\"},{\"w\":\"bvd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-0zU2Q\"},\"id\":\"AirbyteJSONLoader-0zU2Q\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:50:09.035318\",\"folder\":null,\"id\":\"5e3c0d55-1a00-4e15-9821-0c625c6415ff\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVAgent\",\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVAgent-Ub1Xe\"},\"id\":\"CSVAgent-Ub1Xe\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:50:16.985419\",\"folder\":null,\"id\":\"baee8061-4788-4ce6-928f-c6ce1ecb443c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Heisenberg\",\"description\":\"Maximize Impact with Intelligent 
Conversations.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":366,\"id\":\"CSVLoader-RMUx9\",\"type\":\"genericNode\",\"position\":{\"x\":111,\"y\":345.51250076293945},\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"z2b\":\"z9\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null},\"id\":\"CSVLoader-RMUx9\"},\"positionAbsolute\":{\"x\":111,\"y\":345.51250076293945}}],\"edges\":[],\"viewport\":{\"x\":0,\"y\":0,\"zoom\":1}},\"is_component\":false,\"updated_at\":\"2023-12-09T14:38:40.291137\",\"folder\":null,\"id\":\"109e9629-d569-4555-9d40-42203a5ed035\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CohereEmbeddings\",\"description\":\"Cohere embedding 
models.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CohereEmbeddings\",\"node\":{\"template\":{\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cohere_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"embed-english-v2.0\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"truncate\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"truncate\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"user_agent\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"password\":false,\"name\":\"user_agent\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CohereEmbeddings\"},\"description\":\"Cohere embedding 
models.\",\"base_classes\":[\"CohereEmbeddings\",\"Embeddings\"],\"display_name\":\"CohereEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CohereEmbeddings-HFUAf\"},\"id\":\"CohereEmbeddings-HFUAf\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:50:46.172344\",\"folder\":null,\"id\":\"662d8040-d47c-40db-bda1-66489a1c9d24\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (1)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-3wrib\"},\"id\":\"CSVLoader-3wrib\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:56:11.662526\",\"folder\":null,\"id\":\"4fae6aec-ddbd-498d-a4d1-ca0290f6a5ad\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (2)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-VEjyx\"},\"id\":\"CSVLoader-VEjyx\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:57:37.560784\",\"folder\":null,\"id\":\"d80635ee-966c-41f2-981c-afa2c388ac6e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (3)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-PIhOc\"},\"id\":\"CSVLoader-PIhOc\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:00:27.991966\",\"folder\":null,\"id\":\"c5cc4c32-77c8-4889-a9f8-2632466b7366\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (4)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-deB43\"},\"id\":\"CSVLoader-deB43\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:00:42.509243\",\"folder\":null,\"id\":\"0ab59938-ccf4-4bb9-b285-1f5da38648f0\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-mdkLm\"},\"id\":\"CSVLoader-mdkLm\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:02:17.458354\",\"folder\":null,\"id\":\"f127b7e7-4149-492e-8998-6b1b35ec4153\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (5)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-FRc6Y\"},\"id\":\"CSVLoader-FRc6Y\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:02:26.958867\",\"folder\":null,\"id\":\"a870a096-725c-4914-add0-8d57d710b7e5\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (2)\",\"description\":\"Load local `Airbyte` json files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-Z0uol\"},\"id\":\"AirbyteJSONLoader-Z0uol\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:03:33.764117\",\"folder\":null,\"id\":\"2a37c76c-65c8-4c01-a72d-eb46f3d41a56\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS 
Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock 
Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-28ASv\"},\"id\":\"AmazonBedrockEmbeddings-28ASv\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:03:41.066618\",\"folder\":null,\"id\":\"850ba4e6-96ca-49a7-9b0c-4b7048fd52c8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"ConversationBufferMemory\",\"description\":\"Buffer for storing conversation memory.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"ConversationBufferMemory\",\"node\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationBufferMemory\"},\"description\":\"Buffer for storing conversation memory.\",\"base_classes\":[\"BaseMemory\",\"BaseChatMemory\",\"ConversationBufferMemory\"],\"display_name\":\"ConversationBufferMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"ConversationBufferMemory-WaoLx\"},\"id\":\"ConversationBufferMemory-WaoLx\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:04:46.058568\",\"folder\":null,\"id\":\"be650940-0449-4eb0-a708-056310f924fd\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (1)\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import 
BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-Bs5PG\"},\"id\":\"AmazonBedrockEmbeddings-Bs5PG\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:05:50.570800\",\"folder\":null,\"id\":\"53ad1fe2-f3af-4354-86e0-eb141ce12859\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (2)\",\"description\":\"Embeddings model from Amazon 
Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return 
output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock 
Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-zSZ4t\"},\"id\":\"AmazonBedrockEmbeddings-zSZ4t\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:06:11.415088\",\"folder\":null,\"id\":\"e9ed1c90-146e-452d-8ccd-ebf2e0da4331\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (3)\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n 
\\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon 
Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-Bxhlt\"},\"id\":\"AmazonBedrockEmbeddings-Bxhlt\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:07:06.411108\",\"folder\":null,\"id\":\"83783c57-64e3-41a6-aa7e-ed82f80f1e53\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (6)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"z2b\":\"z9\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-2pn2o\"},\"id\":\"CSVLoader-2pn2o\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:38:30.706258\",\"folder\":null,\"id\":\"c4ae9de6-9df3-4d3c-afc9-1752af4fecd7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Agent Initializer\",\"description\":\"Initialize a Langchain Agent.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AgentInitializer\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, List, Union\\n\\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\\n\\n\\nclass AgentInitializerComponent(CustomComponent):\\n display_name: str = \\\"Agent Initializer\\\"\\n description: str = \\\"Initialize a Langchain Agent.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/modules/agents/agent_types/\\\"\\n\\n def build_config(self):\\n agents = list(types.AGENT_TO_CLASS.keys())\\n # field_type and required are optional\\n return {\\n \\\"agent\\\": {\\\"options\\\": agents, \\\"value\\\": agents[0], \\\"display_name\\\": \\\"Agent Type\\\"},\\n \\\"max_iterations\\\": {\\\"display_name\\\": \\\"Max Iterations\\\", \\\"value\\\": 10},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"Language Model\\\"},\\n }\\n\\n def build(\\n self, agent: str, llm: 
BaseLanguageModel, memory: BaseChatMemory, tools: List[Tool], max_iterations: int\\n ) -> Union[AgentExecutor, Callable]:\\n agent = AgentType(agent)\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n agent=agent,\\n memory=memory,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"chat-zero-shot-react-description\",\"chat-conversational-react-description\",\"structured-chat-zero-shot-react-description\",\"openai-functions\",\"openai-multi-functions\",\"JsonAgent\",\"CSVAgent\",\"AgentInitializer\",\"VectorStoreAgent\",\"VectorStoreRouterAgent\",\"SQLAgent\"],\"name\":\"agent\",\"display_name\":\"Agent Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"Language Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"max_iterations\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_iterations\",\"display_name\":\"Max 
Iterations\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true}},\"description\":\"Initialize a Langchain Agent.\",\"base_classes\":[\"Chain\",\"AgentExecutor\",\"Callable\"],\"display_name\":\"Agent Initializer\",\"custom_fields\":{\"agent\":null,\"llm\":null,\"max_iterations\":null,\"memory\":null,\"tools\":null},\"output_types\":[\"AgentInitializer\"],\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AgentInitializer-MJrAC\"},\"id\":\"AgentInitializer-MJrAC\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:42:54.715163\",\"folder\":null,\"id\":\"ff84b5f9-5680-4084-a9f1-7d94fe675486\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Agent Initializer (1)\",\"description\":\"Initialize a Langchain Agent.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AgentInitializer\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, List, Union\\n\\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\\n\\n\\nclass AgentInitializerComponent(CustomComponent):\\n display_name: str = \\\"Agent 
Initializer\\\"\\n description: str = \\\"Initialize a Langchain Agent.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/modules/agents/agent_types/\\\"\\n\\n def build_config(self):\\n agents = list(types.AGENT_TO_CLASS.keys())\\n # field_type and required are optional\\n return {\\n \\\"agent\\\": {\\\"options\\\": agents, \\\"value\\\": agents[0], \\\"display_name\\\": \\\"Agent Type\\\"},\\n \\\"max_iterations\\\": {\\\"display_name\\\": \\\"Max Iterations\\\", \\\"value\\\": 10},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"Language Model\\\"},\\n }\\n\\n def build(\\n self, agent: str, llm: BaseLanguageModel, memory: BaseChatMemory, tools: List[Tool], max_iterations: int\\n ) -> Union[AgentExecutor, Callable]:\\n agent = AgentType(agent)\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n agent=agent,\\n memory=memory,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"chat-zero-shot-react-description\",\"chat-conversational-react-description\",\"structured-chat-zero-shot-react-description\",\"openai-functions\",\"openai-multi-functions\",\"JsonAgent\",\"CSVAgent\",\"AgentInitializer\",\"VectorStoreAgent\",\"VectorStoreRouterAgent\",\"SQLAgent\"],\"name\":\"agent\",\"display_name\":\"Agent 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"Language Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"max_iterations\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_iterations\",\"display_name\":\"Max Iterations\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true}},\"description\":\"Initialize a Langchain Agent.\",\"base_classes\":[\"Chain\",\"AgentExecutor\",\"Callable\"],\"display_name\":\"Agent Initializer\",\"custom_fields\":{\"agent\":null,\"llm\":null,\"max_iterations\":null,\"memory\":null,\"tools\":null},\"output_types\":[\"AgentInitializer\"],\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AgentInitializer-3lcZ4\"},\"id\":\"AgentInitializer-3lcZ4\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:43:20.819934\",\"folder\":null,\"id\":\"97f5db9f-d6cc-4555-88fb-3be102c67814\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Exuberant Banach\",\"description\":\"Unfolding Linguistic 
Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T14:52:49.951416\",\"folder\":null,\"id\":\"a600acd1-213a-4ce7-8648-ab2ee59f5918\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (3)\",\"description\":\"Load local `Airbyte` json files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-JhQtx\"},\"id\":\"AirbyteJSONLoader-JhQtx\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:44:49.587337\",\"folder\":null,\"id\":\"07c52ad7-ca52-4cdd-ab40-74a91dbad0dd\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (4)\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-bIvDc\"},\"id\":\"AirbyteJSONLoader-bIvDc\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:45:13.458500\",\"folder\":null,\"id\":\"53e4d256-3653-444b-b8e0-fb97b3ae5c7e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (5)\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-2BLS8\"},\"id\":\"AirbyteJSONLoader-2BLS8\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:45:44.461699\",\"folder\":null,\"id\":\"b5158cc1-c6b8-4e25-907a-bc7e7637aa38\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (4)\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for 
implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint 
URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-NsBzN\"},\"id\":\"AmazonBedrockEmbeddings-NsBzN\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:50:29.791575\",\"folder\":null,\"id\":\"3fb77f72-1be7-4ce0-ae89-a82b0eee9acf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Golick\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.707854\",\"folder\":null,\"id\":\"9c5d21c2-ea82-4cf4-a591-c3d3fd3f13e6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Davinci (1)\",\"description\":\"Unleashing Linguistic 
Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.761150\",\"folder\":null,\"id\":\"f8e2e16e-129c-4116-9626-2c6b524d97b7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Degrasse\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.764440\",\"folder\":null,\"id\":\"d7f4f7b5-effe-4834-8cab-4f2fc4f6e9de\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Prickly Archimedes\",\"description\":\"Language Chainlink Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.767546\",\"folder\":null,\"id\":\"2265d813-4a87-410f-b06a-65e35db8219e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Boring Heyrovsky\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.765105\",\"folder\":null,\"id\":\"10bfd881-e67e-4c33-965d-ee041695edce\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nostalgic Carroll\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.811794\",\"folder\":null,\"id\":\"63fa5653-4513-47f2-8dfa-baeb1d981f93\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Perlman\",\"description\":\"Empowering Communication, Enabling 
Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.814993\",\"folder\":null,\"id\":\"f4bc5945-20d9-4703-a5bb-6a4a3ef7fc8e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hilarious Stallman\",\"description\":\"Connect the Dots, Craft Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.813144\",\"folder\":null,\"id\":\"8d8b80f9-4b74-4cd5-bc5c-08a32c4d2b95\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Awesome Einstein\",\"description\":\"The Power of Language at Your Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:19.518262\",\"folder\":null,\"id\":\"21808fd7-a492-4e29-8863-301c2785f542\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Funky Swanson\",\"description\":\"Sculpting Language with Precision.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.185637\",\"folder\":null,\"id\":\"7752299a-4aa9-44db-8d9c-5c4a2ac1b071\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Romantic Pascal\",\"description\":\"Unlock the Power of AI in Your Business Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.223064\",\"folder\":null,\"id\":\"78f48a13-6a9f-42ce-9d58-ec9b63b381d2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Bartik\",\"description\":\"Sculpting Language with 
Precision.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.239758\",\"folder\":null,\"id\":\"38074091-3d7c-4d42-b61b-ae195c1b8a4e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Condescending Joliot\",\"description\":\"Language Models, Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.271996\",\"folder\":null,\"id\":\"773020aa-5c2d-4632-ad9e-21276861ab93\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Kalam\",\"description\":\"The Power of Language at Your Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.283929\",\"folder\":null,\"id\":\"0092d077-6d31-401f-a739-b164476b90ed\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grinning Bhabha\",\"description\":\"Your Passport to Linguistic Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.467739\",\"folder\":null,\"id\":\"4a395211-712d-4dd2-b3bb-e6b19200f15d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Babbage\",\"description\":\"Design Dialogues with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.742990\",\"folder\":null,\"id\":\"3f086a82-3e29-4328-9da8-e02dc9201744\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tiny Volta\",\"description\":\"Nurture NLP Nodes 
Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:24:18.594247\",\"folder\":null,\"id\":\"659b8289-c8e8-413d-9b2b-51e68411f170\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Radiant Jennings\",\"description\":\"Design, Develop, Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:24:42.640309\",\"folder\":null,\"id\":\"f55f0640-d47e-4471-b1ba-3411d38ecac5\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Shaw\",\"description\":\"Interactive Language Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:24:58.376247\",\"folder\":null,\"id\":\"a039811e-449a-4244-83ed-4ddd731a0921\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Peppy Murdock (1)\",\"description\":\"Language Chainlink Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:25:14.674524\",\"folder\":null,\"id\":\"0faf6144-6ca6-4f64-a343-665ae54774ef\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Volta\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:26:50.418029\",\"folder\":null,\"id\":\"c5d149d0-c401-4f5e-b79f-2abcc7b8d98d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Bubbly Curie\",\"description\":\"Navigate the Networks of 
Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:27:10.900899\",\"folder\":null,\"id\":\"7bb2d226-9740-433d-861f-a499632c5a13\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Radiant Nobel\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:29:11.541630\",\"folder\":null,\"id\":\"947906d8-75ea-470c-a8e2-cc80c5d3bdbb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cheerful Northcutt\",\"description\":\"Unleashing Business Potential through Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:29:37.169791\",\"folder\":null,\"id\":\"56f109fd-2c18-42ed-a9b2-089a678a0c11\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Suspicious Khayyam\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:30:24.686359\",\"folder\":null,\"id\":\"551d7bfa-49aa-43f1-a779-e47eabc2aaf2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Spence\",\"description\":\"Create, Curate, Communicate with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:30:47.738472\",\"folder\":null,\"id\":\"12aef128-130e-492e-bf84-62d4cb4cd456\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Lavoisier\",\"description\":\"Nurture NLP Nodes 
Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:31:43.272365\",\"folder\":null,\"id\":\"9952c044-6903-4fba-a270-6ed0b90c93c9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Upbeat Bose\",\"description\":\"Unravel the Art of Articulation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:32:23.972035\",\"folder\":null,\"id\":\"65f49582-c9fa-4c67-a2bc-4e8fa73ca731\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Perlman\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:33:26.984083\",\"folder\":null,\"id\":\"9ae57fe2-c677-47fa-a059-7d3d915a1178\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sharp Cori\",\"description\":\"Conversation Catalyst Engine.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:33:52.377359\",\"folder\":null,\"id\":\"2920dde2-5c24-4fe0-9c06-ef86b5a16a99\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"}]" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 1.03 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.423Z", - "time": 0.901, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/all", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer 
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20; refresh_tkn_lflw=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/flows" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "331584" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:54:58 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": 
"{\"chains\":{\"ConversationalRetrievalChain\":{\"template\":{\"callbacks\":{\"type\":\"Callbacks\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"condense_question_llm\":{\"type\":\"BaseLanguageModel\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"condense_question_llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"condense_question_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"chat_history\",\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.\\n\\nChat History:\\n{chat_history}\\nFollow Up Input: {question}\\nStandalone 
question:\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"condense_question_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chain_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"combine_docs_chain_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"combine_docs_chain_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"return_source_documents\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_source_documents\",\"display_name\":\"Return source 
documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ConversationalRetrievalChain\"},\"description\":\"Convenience method to load chain from LLM and retriever.\",\"base_classes\":[\"BaseConversationalRetrievalChain\",\"ConversationalRetrievalChain\",\"Chain\",\"Callable\"],\"display_name\":\"ConversationalRetrievalChain\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/chat_vector_db\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"LLMCheckerChain\":{\"template\":{\"check_assertions_prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"assertions\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Here is a bullet point list of assertions:\\n{assertions}\\nFor each assertion, determine whether it is true or false. 
If it is false, explain why.\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"check_assertions_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"create_draft_answer_prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"{question}\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"create_draft_answer_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"list_assertions_prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"statement\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Here is a statement:\\n{statement}\\nMake a bullet point list of the assumptions you made when producing the above statement.\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"list_assertions_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"revised_answer_prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"checked_assertions\",\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"{checked_assertions}\\n\\nQuestion: In light of the above assertions and checks, how would you answer the question 
'{question}'?\\n\\nAnswer:\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"revised_answer_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"LLMCheckerChain\"},\"description\":\"\",\"base_classes\":[\"Chain\",\"LLMCheckerChain\",\"Callable\"],\"display_name\":\"LLMCheckerChain\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_checker\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"LLMMathChain\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm_chain\":{\"type\":\"LLMChain\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Translate a math problem into a expression that can be executed using Python's numexpr library. 
Use the output of running this code to answer the question.\\n\\nQuestion: ${{Question with math problem.}}\\n```text\\n${{single line mathematical expression that solves the problem}}\\n```\\n...numexpr.evaluate(text)...\\n```output\\n${{Output of running the code}}\\n```\\nAnswer: ${{Answer}}\\n\\nBegin.\\n\\nQuestion: What is 37593 * 67?\\n```text\\n37593 * 67\\n```\\n...numexpr.evaluate(\\\"37593 * 67\\\")...\\n```output\\n2518731\\n```\\nAnswer: 2518731\\n\\nQuestion: 37593^(1/5)\\n```text\\n37593**(1/5)\\n```\\n...numexpr.evaluate(\\\"37593**(1/5)\\\")...\\n```output\\n8.222831614237718\\n```\\nAnswer: 8.222831614237718\\n\\nQuestion: {question}\\n\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"question\",\"fileTypes\":[],\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"answer\",\"fileTypes\":[],\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\
":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"LLMMathChain\"},\"description\":\"Chain that interprets a prompt and executes python code to do math.\",\"base_classes\":[\"Chain\",\"LLMMathChain\",\"Callable\"],\"display_name\":\"LLMMathChain\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_math\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RetrievalQA\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"combine_documents_chain\":{\"type\":\"BaseCombineDocumentsChain\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"query\",\"fileTypes\":[],\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",
\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"result\",\"fileTypes\":[],\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"return_source_documents\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RetrievalQA\"},\"description\":\"Chain for question-answering against an 
index.\",\"base_classes\":[\"RetrievalQA\",\"BaseRetrievalQA\",\"Chain\",\"Callable\"],\"display_name\":\"RetrievalQA\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/vector_db_qa\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RetrievalQAWithSourcesChain\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"combine_documents_chain\":{\"type\":\"BaseCombineDocumentsChain\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"answer_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"answer\",\"fileTypes\":[],\"password\":false,\"name\":\"answer_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_docs_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"docs\",\"fileTypes\":[],\"password\":false,\"name\":\"input_docs_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_tokens_limit\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,
\"multiline\":false,\"value\":3375,\"fileTypes\":[],\"password\":false,\"name\":\"max_tokens_limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"question_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"question\",\"fileTypes\":[],\"password\":false,\"name\":\"question_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"reduce_k_below_max_tokens\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"reduce_k_below_max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"return_source_documents\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"sources_answer_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"sources\",\"fileTypes\":[],\"password\":false,\"name\":\"sources_answer_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RetrievalQAWithSourcesChain\"},\"description\":\"Question-an
swering with sources over an index.\",\"base_classes\":[\"RetrievalQAWithSourcesChain\",\"BaseQAWithSourcesChain\",\"Chain\",\"Callable\"],\"display_name\":\"RetrievalQAWithSourcesChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SQLDatabaseChain\":{\"template\":{\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SQLDatabaseChain\"},\"description\":\"Create a SQLDatabaseChain from an LLM and a database 
connection.\",\"base_classes\":[\"SQLDatabaseChain\",\"Chain\",\"Callable\"],\"display_name\":\"SQLDatabaseChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"CombineDocsChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chain_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"load_qa_chain\"},\"description\":\"Load question answering chain.\",\"base_classes\":[\"BaseCombineDocumentsChain\",\"Callable\"],\"display_name\":\"CombineDocsChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SeriesCharacterChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"character\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"character\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"series\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"series\",\"advanced\":false,\"dynamic\
":false,\"info\":\"\"},\"_type\":\"SeriesCharacterChain\"},\"description\":\"SeriesCharacterChain is a chain you can use to have a conversation with a character from a series.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"Chain\",\"ConversationChain\",\"SeriesCharacterChain\",\"Callable\"],\"display_name\":\"SeriesCharacterChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"MidJourneyPromptChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"MidJourneyPromptChain\"},\"description\":\"MidJourneyPromptChain is a chain you can use to generate new MidJourney 
prompts.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"Chain\",\"ConversationChain\",\"MidJourneyPromptChain\"],\"display_name\":\"MidJourneyPromptChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"TimeTravelGuideChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"TimeTravelGuideChain\"},\"description\":\"Time travel guide chain.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"TimeTravelGuideChain\",\"Chain\",\"ConversationChain\"],\"display_name\":\"TimeTravelGuideChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"LLMChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prompt\",\
"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Chain to run queries against 
LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"documentation\":\"\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"field_formatters\":{},\"beta\":true},\"ConversationChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"Memory to load context from. If none is provided, a ConversationBufferMemory will be used.\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.chains import ConversationChain\\nfrom typing import Optional, Union, Callable\\nfrom langflow.field_typing import BaseLanguageModel, BaseMemory, Chain\\n\\n\\nclass ConversationChainComponent(CustomComponent):\\n display_name = \\\"ConversationChain\\\"\\n description = \\\"Chain to have a conversation and load context from memory.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\n \\\"display_name\\\": \\\"Memory\\\",\\n \\\"info\\\": \\\"Memory to load context from. 
If none is provided, a ConversationBufferMemory will be used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n if memory is None:\\n return ConversationChain(llm=llm)\\n return ConversationChain(llm=llm, memory=memory)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Chain to have a conversation and load context from memory.\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"ConversationChain\",\"documentation\":\"\",\"custom_fields\":{\"llm\":null,\"memory\":null},\"output_types\":[\"ConversationChain\"],\"field_formatters\":{},\"beta\":true},\"PromptRunner\":{\"template\":{\"llm\":{\"type\":\"BaseLLM\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prompt\":{\"type\":\"PromptTemplate\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt Template\",\"advanced\":false,\"dynamic\":false,\"info\":\"Make sure the prompt has all variables filled.\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.prompts import PromptTemplate\\nfrom langchain.schema import Document\\n\\n\\nclass PromptRunner(CustomComponent):\\n display_name: str = \\\"Prompt Runner\\\"\\n description: str = \\\"Run a Chain with the given PromptTemplate\\\"\\n beta: bool = True\\n field_config = {\\n \\\"llm\\\": 
{\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt Template\\\",\\n \\\"info\\\": \\\"Make sure the prompt has all variables filled.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}) -> Document:\\n chain = prompt | llm\\n # The input is an empty dict because the prompt is already filled\\n result = chain.invoke(input=inputs)\\n if hasattr(result, \\\"content\\\"):\\n result = result.content\\n self.repr_value = result\\n return Document(page_content=str(result))\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"inputs\":{\"type\":\"dict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Run a Chain with the given PromptTemplate\",\"base_classes\":[\"Document\"],\"display_name\":\"Prompt 
Runner\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"llm\":null,\"prompt\":null},\"output_types\":[\"PromptRunner\"],\"field_formatters\":{},\"beta\":true}},\"agents\":{\"ZeroShotAgent\":{\"template\":{\"callback_manager\":{\"type\":\"BaseCallbackManager\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callback_manager\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_parser\":{\"type\":\"AgentOutputParser\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tools\":{\"type\":\"BaseTool\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"format_instructions\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":true,\"value\":\"Use the following format:\\n\\nQuestion: the input question you must answer\\nThought: you should always think about what to do\\nAction: the action to take, should be one of [{tool_names}]\\nAction Input: the input to the action\\nObservation: the result of the action\\n... 
(this Thought/Action/Action Input/Observation can repeat N times)\\nThought: I now know the final answer\\nFinal Answer: the final answer to the original input question\",\"fileTypes\":[],\"password\":false,\"name\":\"format_instructions\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_variables\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"Answer the following questions as best you can. You have access to the following tools:\",\"fileTypes\":[],\"password\":false,\"name\":\"prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"suffix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"Begin!\\n\\nQuestion: {input}\\nThought:{agent_scratchpad}\",\"fileTypes\":[],\"password\":false,\"name\":\"suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ZeroShotAgent\"},\"description\":\"Construct an agent from an LLM and 
tools.\",\"base_classes\":[\"ZeroShotAgent\",\"BaseSingleActionAgent\",\"Agent\",\"Callable\"],\"display_name\":\"ZeroShotAgent\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"JsonAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"toolkit\":{\"type\":\"BaseToolkit\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"json_agent\"},\"description\":\"Construct a json agent from an LLM and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"JsonAgent\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/openapi\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"CSVAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".csv\"],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VectorStoreAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"vectorstoreinfo\":{\"type\":\"VectorStoreInfo\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstoreinfo\",\"display_name\":\"Vector Store Info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"vectorstore_agent\"},\"description\":\"Construct an agent from a Vector Store.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"VectorStoreAgent\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VectorStoreRouterAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"vectorstoreroutertoolkit\":{\"type\":\"VectorStoreRouterToolkit\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstoreroutertoolkit\",\"display_name\":\"Vector Store Router Toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"vectorstorerouter_agent\"},\"description\":\"Construct an agent from a Vector Store 
Router.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"VectorStoreRouterAgent\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SQLAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"database_uri\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"database_uri\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"sql_agent\"},\"description\":\"Construct an SQL agent from an LLM and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"SQLAgent\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"AgentInitializer\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"Language 
Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tools\":{\"type\":\"Tool\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"agent\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"chat-zero-shot-react-description\",\"chat-conversational-react-description\",\"structured-chat-zero-shot-react-description\",\"openai-functions\",\"openai-multi-functions\",\"JsonAgent\",\"CSVAgent\",\"AgentInitializer\",\"VectorStoreAgent\",\"VectorStoreRouterAgent\",\"SQLAgent\"],\"name\":\"agent\",\"display_name\":\"Agent Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, List, Union\\n\\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\\n\\n\\nclass AgentInitializerComponent(CustomComponent):\\n display_name: str = \\\"Agent Initializer\\\"\\n description: str = \\\"Initialize a Langchain Agent.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/modules/agents/agent_types/\\\"\\n\\n def 
build_config(self):\\n agents = list(types.AGENT_TO_CLASS.keys())\\n # field_type and required are optional\\n return {\\n \\\"agent\\\": {\\\"options\\\": agents, \\\"value\\\": agents[0], \\\"display_name\\\": \\\"Agent Type\\\"},\\n \\\"max_iterations\\\": {\\\"display_name\\\": \\\"Max Iterations\\\", \\\"value\\\": 10},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"Language Model\\\"},\\n }\\n\\n def build(\\n self, agent: str, llm: BaseLanguageModel, memory: BaseChatMemory, tools: List[Tool], max_iterations: int\\n ) -> Union[AgentExecutor, Callable]:\\n agent = AgentType(agent)\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n agent=agent,\\n memory=memory,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"max_iterations\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_iterations\",\"display_name\":\"Max Iterations\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Initialize a Langchain Agent.\",\"base_classes\":[\"AgentExecutor\",\"Chain\",\"Callable\"],\"display_name\":\"Agent 
Initializer\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"custom_fields\":{\"agent\":null,\"llm\":null,\"max_iterations\":null,\"memory\":null,\"tools\":null},\"output_types\":[\"AgentInitializer\"],\"field_formatters\":{},\"beta\":true},\"OpenAIConversationalAgent\":{\"template\":{\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"system_message\":{\"type\":\"SystemMessagePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"system_message\",\"display_name\":\"System Message\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tools\":{\"type\":\"Tool\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langchain.agents.agent import AgentExecutor\\nfrom langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import _get_default_system_message\\nfrom langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent\\nfrom langchain.chat_models import ChatOpenAI\\nfrom langchain.memory.token_buffer import ConversationTokenBufferMemory\\nfrom langchain.prompts import SystemMessagePromptTemplate\\nfrom langchain.prompts.chat import MessagesPlaceholder\\nfrom langchain.schema.memory import BaseMemory\\nfrom langchain.tools import Tool\\nfrom langflow import CustomComponent\\n\\n\\nclass 
ConversationalAgent(CustomComponent):\\n display_name: str = \\\"OpenAI Conversational Agent\\\"\\n description: str = \\\"Conversational Agent that can use OpenAI's function calling API\\\"\\n\\n def build_config(self):\\n openai_function_models = [\\n \\\"gpt-4-1106-preview\\\",\\n \\\"gpt-3.5-turbo\\\",\\n \\\"gpt-3.5-turbo-16k\\\",\\n \\\"gpt-4\\\",\\n \\\"gpt-4-32k\\\",\\n ]\\n return {\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"system_message\\\": {\\\"display_name\\\": \\\"System Message\\\"},\\n \\\"max_token_limit\\\": {\\\"display_name\\\": \\\"Max Token Limit\\\"},\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": openai_function_models,\\n \\\"value\\\": openai_function_models[0],\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_name: str,\\n openai_api_key: str,\\n tools: List[Tool],\\n openai_api_base: Optional[str] = None,\\n memory: Optional[BaseMemory] = None,\\n system_message: Optional[SystemMessagePromptTemplate] = None,\\n max_token_limit: int = 2000,\\n ) -> AgentExecutor:\\n llm = ChatOpenAI(\\n model=model_name,\\n api_key=openai_api_key,\\n base_url=openai_api_base,\\n )\\n if not memory:\\n memory_key = \\\"chat_history\\\"\\n memory = ConversationTokenBufferMemory(\\n memory_key=memory_key,\\n return_messages=True,\\n output_key=\\\"output\\\",\\n llm=llm,\\n max_token_limit=max_token_limit,\\n )\\n else:\\n memory_key = memory.memory_key # type: ignore\\n\\n _system_message = system_message or _get_default_system_message()\\n prompt = OpenAIFunctionsAgent.create_prompt(\\n system_message=_system_message, # type: ignore\\n extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],\\n )\\n agent = OpenAIFunctionsAgent(\\n llm=llm,\\n tools=tools,\\n prompt=prompt, # type: ignore\\n )\\n return AgentExecutor(\\n agent=agent,\\n tools=tools, # type: ignore\\n 
memory=memory,\\n verbose=True,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"max_token_limit\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":2000,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"max_token_limit\",\"display_name\":\"Max Token Limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"openai_api_base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"openai_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Conversational Agent that can use OpenAI's function calling API\",\"base_classes\":[\"AgentExecutor\",\"Chain\"],\"display_name\":\"OpenAI Conversational 
Agent\",\"documentation\":\"\",\"custom_fields\":{\"max_token_limit\":null,\"memory\":null,\"model_name\":null,\"openai_api_base\":null,\"openai_api_key\":null,\"system_message\":null,\"tools\":null},\"output_types\":[\"OpenAIConversationalAgent\"],\"field_formatters\":{},\"beta\":true}},\"prompts\":{\"ChatMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":true,\"dynamic\":true,\"info\":\"\"},\"prompt\":{\"type\":\"prompt\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"role\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"role\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"ChatMessagePromptTemplate\"},\"description\":\"Chat message prompt 
template.\",\"base_classes\":[\"BaseStringMessagePromptTemplate\",\"ChatMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"ChatMessagePromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ChatPromptTemplate\":{\"template\":{\"messages\":{\"type\":\"BaseMessagePromptTemplate\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"messages\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"output_parser\":{\"type\":\"BaseOutputParser\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"input_types\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"input_variables\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"partial_variables\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"validate_template\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"ChatPromptTemplate\"},\"description\":\"A prompt template for chat 
models.\",\"base_classes\":[\"BaseChatPromptTemplate\",\"BasePromptTemplate\",\"ChatPromptTemplate\"],\"display_name\":\"ChatPromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"HumanMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":true,\"dynamic\":true,\"info\":\"\"},\"prompt\":{\"type\":\"prompt\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"HumanMessagePromptTemplate\"},\"description\":\"Human message prompt template. 
This is a message sent from the user.\",\"base_classes\":[\"BaseStringMessagePromptTemplate\",\"HumanMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"HumanMessagePromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"PromptTemplate\":{\"template\":{\"output_parser\":{\"type\":\"BaseOutputParser\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"input_types\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"input_variables\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"partial_variables\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"template\":{\"type\":\"prompt\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"template_format\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"fileTypes\":[],\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"validate_template\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\
":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"PromptTemplate\"},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"BasePromptTemplate\",\"PromptTemplate\",\"StringPromptTemplate\"],\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SystemMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":true,\"dynamic\":true,\"info\":\"\"},\"prompt\":{\"type\":\"prompt\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"SystemMessagePromptTemplate\"},\"description\":\"System message prompt 
template.\",\"base_classes\":[\"BaseStringMessagePromptTemplate\",\"SystemMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"SystemMessagePromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false}},\"llms\":{\"Anthropic\":{\"template\":{\"anthropic_api_key\":{\"type\":\"SecretStr\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"anthropic_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"count_tokens\":{\"type\":\"Callable[[str], int]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"count_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"AI_PROMPT\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"AI_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"HUMAN_PROMPT\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"HUMAN_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"anthropic_api_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"anthropic_api_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"sh
ow\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"default_request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"max_tokens_to_sample\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"password\":false,\"name\":\"max_tokens_to_sample\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"claude-2\",\"fileTypes\":[],\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":f
alse,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"top_k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Anthropic\"},\"description\":\"Anthropic large language 
models.\",\"base_classes\":[\"LLM\",\"BaseLanguageModel\",\"_AnthropicCommon\",\"BaseLLM\",\"Anthropic\"],\"display_name\":\"Anthropic\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Cohere\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cohere_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"frequency_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynam
ic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"p\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"presence_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"
tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.75,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"truncate\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"truncate\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"user_agent\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"langchain\",\"fileTypes\":[],\"password\":false,\"name\":\"user_agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Cohere\"},\"description\":\"Cohere large language 
models.\",\"base_classes\":[\"LLM\",\"BaseLanguageModel\",\"Cohere\",\"BaseLLM\",\"BaseCohere\"],\"display_name\":\"Cohere\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"CTransformers\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"config\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{\\n \\\"top_k\\\": 40,\\n \\\"top_p\\\": 0.95,\\n \\\"temperature\\\": 0.8,\\n \\\"repetition_penalty\\\": 1.1,\\n \\\"last_n_tokens\\\": 64,\\n \\\"seed\\\": -1,\\n \\\"max_new_tokens\\\": 256,\\n \\\"stop\\\": null,\\n \\\"stream\\\": false,\\n \\\"reset\\\": true,\\n \\\"batch_size\\\": 8,\\n \\\"threads\\\": -1,\\n \\\"context_length\\\": -1,\\n \\\"gpu_layers\\\": 
0\\n}\",\"fileTypes\":[],\"password\":false,\"name\":\"config\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"lib\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"lib\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_file\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CTransformers\"},\"description\":\"C Transformers LLM 
models.\",\"base_classes\":[\"LLM\",\"BaseLanguageModel\",\"BaseLLM\",\"CTransformers\"],\"display_name\":\"CTransformers\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"HuggingFaceHub\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"huggingfacehub_api_token\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"HuggingFace Hub API 
Token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"repo_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"gpt2\",\"fileTypes\":[],\"password\":false,\"name\":\"repo_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"task\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"text-generation\",\"fileTypes\":[],\"password\":false,\"options\":[\"text-generation\",\"text2text-generation\",\"summarization\"],\"name\":\"task\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"HuggingFaceHub\"},\"description\":\"HuggingFaceHub 
models.\",\"base_classes\":[\"LLM\",\"BaseLanguageModel\",\"HuggingFaceHub\",\"BaseLLM\"],\"display_name\":\"HuggingFaceHub\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"LlamaCpp\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"grammar\":{\"type\":\"ForwardRef('str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"grammar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"echo\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"echo\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"f16_kv\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":false,\"name\":\"f16_kv\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"grammar_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"grammar_path\",\"advanced\":true,\"dy
namic\":false,\"info\":\"\"},\"last_n_tokens_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":64,\"fileTypes\":[],\"password\":false,\"name\":\"last_n_tokens_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"logits_all\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"logits_all\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"logprobs\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"logprobs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"lora_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"lora_base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"lora_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"lora_path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"password\":true,\"name\":\"max_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_path\":{\"type\":\"str\",\"required\":true,\"p
laceholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"n_batch\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":8,\"fileTypes\":[],\"password\":false,\"name\":\"n_batch\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"n_ctx\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":512,\"fileTypes\":[],\"password\":false,\"name\":\"n_ctx\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"n_gpu_layers\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"n_gpu_layers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"n_parts\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":-1,\"fileTypes\":[],\"password\":false,\"name\":\"n_parts\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"n_threads\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"n_threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"repeat_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.1,\"fileTypes\":[],\"password\":false,\"name\":\"repeat_penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"rope_freq_base\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10000.0,\"fileTypes\":[],\"password\":false,\"name\":\"rope_freq_base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"rope_freq
_scale\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"password\":false,\"name\":\"rope_freq_scale\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"seed\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":-1,\"fileTypes\":[],\"password\":false,\"name\":\"seed\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"password\":false,\"name\":\"stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"suffix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"suffix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"
},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"use_mlock\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"use_mlock\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"use_mmap\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":false,\"name\":\"use_mmap\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"vocab_only\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"vocab_only\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"LlamaCpp\"},\"description\":\"llama.cpp 
model.\",\"base_classes\":[\"LLM\",\"BaseLanguageModel\",\"LlamaCpp\",\"BaseLLM\"],\"display_name\":\"LlamaCpp\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"OpenAI\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"allowed_special\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"batch_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":20,\"fileTypes\":[],\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"best_of\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advance
d\":false,\"dynamic\":false,\"info\":\"\"},\"default_headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"default_query\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"disallowed_special\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"all\",\"fileTypes\":[],\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"frequency_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0,\"fileTypes\":[],\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"http_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"logit_bias\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":2,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"passwo
rd\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"text-davinci-003\",\"fileTypes\":[],\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\"},\"openai_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_organization\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_proxy\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"presence_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0,\"fileTypes\":[],\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"
name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"tiktoken_model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language 
models.\",\"base_classes\":[\"BaseLanguageModel\",\"OpenAI\",\"BaseOpenAI\",\"BaseLLM\"],\"display_name\":\"OpenAI\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VertexAI\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client_preview\":{\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client_preview\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"location\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_output_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false
,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"password\":false,\"name\":\"max_output_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text-bison\",\"fileTypes\":[],\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"project\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"request_parallelism\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"password\":false,\"name\":\"request_parallelism\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[
],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"tuned_model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tuned_model_name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"VertexAI\"},\"description\":\"Google Vertex AI large language 
models.\",\"base_classes\":[\"BaseLanguageModel\",\"_VertexAIBase\",\"_VertexAICommon\",\"BaseLLM\",\"VertexAI\"],\"display_name\":\"VertexAI\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/google_vertex_ai_palm\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ChatAnthropic\":{\"template\":{\"anthropic_api_key\":{\"type\":\"SecretStr\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"anthropic_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"count_tokens\":{\"type\":\"Callable[[str], int]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"count_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"AI_PROMPT\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"AI_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"HUMAN_PROMPT\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"HUMAN_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"anthropic_api_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"anthropic_api_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multilin
e\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"default_request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"max_tokens_to_sample\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"password\":false,\"name\":\"max_tokens_to_sample\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"claude-2\",\"fileTypes\":[],\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":fa
lse,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"top_k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ChatAnthropic\"},\"description\":\"`Anthropic` chat large language 
models.\",\"base_classes\":[\"_AnthropicCommon\",\"BaseLanguageModel\",\"BaseChatModel\",\"ChatAnthropic\",\"BaseLLM\"],\"display_name\":\"ChatAnthropic\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ChatOpenAI\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"default_headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"default_query\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"http_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\"
:\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":2,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"fileTypes\":[],\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4-vision-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\"},\"openai_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_organization\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_proxy\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\
"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"tiktoken_model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseChatModel\",\"ChatOpenAI\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ChatVertexAI\":{\"template\":{\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client_preview\":{\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client_preview\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"
name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"examples\":{\"type\":\"BaseMessage\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"examples\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"location\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_output_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"password\":false,\"name\":\"max_output_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat-bison\",\"fileTypes\":[],\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\
":false,\"info\":\"\"},\"project\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"request_parallelism\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"password\":false,\"name\":\"request_parallelism\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSp
ec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ChatVertexAI\"},\"description\":\"`Vertex AI` Chat large language models API.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseChatModel\",\"_VertexAIBase\",\"_VertexAICommon\",\"ChatVertexAI\",\"BaseLLM\"],\"display_name\":\"ChatVertexAI\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"AmazonBedrock\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.llms.bedrock import Bedrock\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass AmazonBedrockComponent(CustomComponent):\\n display_name: str = \\\"Amazon Bedrock\\\"\\n description: str = \\\"LLM model from Amazon Bedrock.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\n \\\"ai21.j2-grande-instruct\\\",\\n \\\"ai21.j2-jumbo-instruct\\\",\\n \\\"ai21.j2-mid\\\",\\n \\\"ai21.j2-mid-v1\\\",\\n \\\"ai21.j2-ultra\\\",\\n \\\"ai21.j2-ultra-v1\\\",\\n \\\"anthropic.claude-instant-v1\\\",\\n \\\"anthropic.claude-v1\\\",\\n \\\"anthropic.claude-v2\\\",\\n \\\"cohere.command-text-v14\\\",\\n ],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"streaming\\\": {\\\"display_name\\\": \\\"Streaming\\\", \\\"field_type\\\": \\\"bool\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = 
\\\"anthropic.claude-instant-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = Bedrock(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"anthropic.claude-instant-v1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ai21.j2-grande-instruct\",\"ai21.j2-jumbo-instruct\",\"ai21.j2-mid\",\"ai21.j2-mid-v1\",\"ai21.j2-ultra\",\"ai21.j2-ultra-v1\",\"anthropic.claude-instant-v1\",\"anthropic.claude-v1\",\"anthropic.claude-v2\",\"cohere.command-text-v14\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"LLM model from Amazon Bedrock.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"Amazon Bedrock\",\"documentation\":\"\",\"custom_fields\":{\"credentials_profile_name\":null,\"model_id\":null},\"output_types\":[\"AmazonBedrock\"],\"field_formatters\":{},\"beta\":true},\"AnthropicLLM\":{\"template\":{\"anthropic_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"anthropic_api_key\",\"display_name\":\"Anthropic API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"Your Anthropic API key.\"},\"api_endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_endpoint\",\"display_name\":\"API Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.chat_models.anthropic import ChatAnthropic\\nfrom langchain.llms.base import BaseLanguageModel\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AnthropicLLM(CustomComponent):\\n display_name: str = \\\"AnthropicLLM\\\"\\n description: str = \\\"Anthropic Chat&Completion large language models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"claude-2.1\\\",\\n \\\"claude-2.0\\\",\\n \\\"claude-instant-1.2\\\",\\n \\\"claude-instant-1\\\",\\n # Add more models as needed\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/anthropic\\\",\\n \\\"required\\\": True,\\n \\\"value\\\": \\\"claude-2.1\\\",\\n },\\n \\\"anthropic_api_key\\\": {\\n \\\"display_name\\\": \\\"Anthropic API Key\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"Your Anthropic API key.\\\",\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 256,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.7,\\n },\\n \\\"api_endpoint\\\": {\\n \\\"display_name\\\": \\\"API Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str,\\n anthropic_api_key: Optional[str] = None,\\n max_tokens: Optional[int] = None,\\n temperature: Optional[float] = None,\\n api_endpoint: Optional[str] = None,\\n ) -> BaseLanguageModel:\\n # Set default API endpoint if not provided\\n if not api_endpoint:\\n api_endpoint = \\\"https://api.anthropic.com\\\"\\n\\n try:\\n output = ChatAnthropic(\\n model_name=model,\\n anthropic_api_key=SecretStr(anthropic_api_key) if anthropic_api_key else None,\\n max_tokens_to_sample=max_tokens, # type: ignore\\n temperature=temperature,\\n anthropic_api_url=api_endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Anthropic API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"claude-2.1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"claude-2.1\",\"claude-2.0\",\"claude-instant-1.2\",\"claude-instant-1\"],\"name\":\"model\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/anthropic\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"_type\":\"CustomComponent\"},\"description\":\"Anthropic Chat&Completion large language models.\",\"base_classes\":[\"BaseLanguageModel\"],\"display_name\":\"AnthropicLLM\",\"documentation\":\"\",\"custom_fields\":{\"anthropic_api_key\":null,\"api_endpoint\":null,\"max_tokens\":null,\"model\":null,\"temperature\":null},\"output_types\":[\"AnthropicLLM\"],\"field_formatters\":{},\"beta\":true},\"HuggingFaceEndpoints\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.llms.huggingface_endpoint import HuggingFaceEndpoint\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass HuggingFaceEndpointsComponent(CustomComponent):\\n display_name: str = \\\"Hugging Face Inference API\\\"\\n description: str = \\\"LLM model from Hugging Face Inference API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Endpoint URL\\\", \\\"password\\\": True},\\n \\\"task\\\": {\\n \\\"display_name\\\": \\\"Task\\\",\\n \\\"options\\\": [\\\"text2text-generation\\\", \\\"text-generation\\\", \\\"summarization\\\"],\\n },\\n \\\"huggingfacehub_api_token\\\": {\\\"display_name\\\": \\\"API token\\\", \\\"password\\\": True},\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Keyword Arguments\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n 
def build(\\n self,\\n endpoint_url: str,\\n task: str = \\\"text2text-generation\\\",\\n huggingfacehub_api_token: Optional[str] = None,\\n model_kwargs: Optional[dict] = None,\\n ) -> BaseLLM:\\n try:\\n output = HuggingFaceEndpoint(\\n endpoint_url=endpoint_url,\\n task=task,\\n huggingfacehub_api_token=huggingfacehub_api_token,\\n model_kwargs=model_kwargs,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to HuggingFace Endpoints API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"endpoint_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"endpoint_url\",\"display_name\":\"Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"huggingfacehub_api_token\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"API token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Keyword Arguments\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"task\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"text2text-generation\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"text2text-generation\",\"text-generation\",\"summarization\"],\"name\":\"task\",\"display_name\":\"Task\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"LLM model from Hugging Face Inference 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"Hugging Face Inference API\",\"documentation\":\"\",\"custom_fields\":{\"endpoint_url\":null,\"huggingfacehub_api_token\":null,\"model_kwargs\":null,\"task\":null},\"output_types\":[\"HuggingFaceEndpoints\"],\"field_formatters\":{},\"beta\":true},\"BaiduQianfanChatEndpoints\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint\\nfrom langchain.llms.base import BaseLLM\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass QianfanChatEndpointComponent(CustomComponent):\\n display_name: str = \\\"QianfanChatEndpoint\\\"\\n description: str = (\\n \\\"Baidu Qianfan chat models. Get more detail from \\\"\\n \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from 
https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n \\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = QianfanChatEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=SecretStr(qianfan_ak) if qianfan_ak else None,\\n qianfan_sk=SecretStr(qianfan_sk) if qianfan_sk else None,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n return output # type: 
ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model used.\"},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\"},\"penalty_score\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"qianfan_ak\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from 
https://cloud.baidu.com/product/wenxinworkshop\"},\"qianfan_sk\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"_type\":\"CustomComponent\"},\"description\":\"Baidu Qianfan chat models. 
Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"QianfanChatEndpoint\",\"documentation\":\"\",\"custom_fields\":{\"endpoint\":null,\"model\":null,\"penalty_score\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"temperature\":null,\"top_p\":null},\"output_types\":[\"BaiduQianfanChatEndpoints\"],\"field_formatters\":{},\"beta\":true},\"BaiduQianfanLLMEndpoints\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass QianfanLLMEndpointComponent(CustomComponent):\\n display_name: str = \\\"QianfanLLMEndpoint\\\"\\n description: str = (\\n \\\"Baidu Qianfan hosted open source or customized models. 
\\\"\\n \\\"Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n \\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": 
False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = QianfanLLMEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=qianfan_ak,\\n qianfan_sk=qianfan_sk,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n return output # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model used.\"},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\"},\"penalty_score\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"qianfan_ak\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\"},\"qianfan_sk\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported 
in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"_type\":\"CustomComponent\"},\"description\":\"Baidu Qianfan hosted open source or customized models. Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"QianfanLLMEndpoint\",\"documentation\":\"\",\"custom_fields\":{\"endpoint\":null,\"model\":null,\"penalty_score\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"temperature\":null,\"top_p\":null},\"output_types\":[\"BaiduQianfanLLMEndpoints\"],\"field_formatters\":{},\"beta\":true}},\"memories\":{\"ConversationBufferMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\"},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ConversationBufferMemory\"},\"description\":\"Buffer for storing conversation memory.\",\"base_classes\":[\"BaseChatMemory\",\"BaseMemory\",\"ConversationBufferMemory\"],\"display_name\":\"ConversationBufferMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ConversationBufferWindowMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\"
:[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\"},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ConversationBufferWindowMemory\"},\"description\":\"Buffer for storing conversation memory inside a limited size window.\",\"base_classes\":[\"BaseChatMemory\",\"ConversationBufferWindowMemory\",\"BaseMemory\"],\"display_name\":\"ConversationBufferWindowMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer_window\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ConversationEntityMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"entity_extraction_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"entity_store\":{\"type\":\"BaseEntityStore\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_store\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"entity_summarization_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_summarization_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"f
ileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chat_history_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"history\",\"fileTypes\":[],\"password\":false,\"name\":\"chat_history_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"entity_cache\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\"},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable 
to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ConversationEntityMemory\"},\"description\":\"Entity extractor & summarizer memory.\",\"base_classes\":[\"BaseChatMemory\",\"BaseMemory\",\"ConversationEntityMemory\"],\"display_name\":\"ConversationEntityMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/entity_memory_with_sqlite\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ConversationKGMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"entity_extraction_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"kg\":{\"type\":\"NetworkxEntityGraph\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"kg\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"knowledge_extraction_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"knowledge_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"pas
sword\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"summary_message_cls\":{\"type\":\"Type[langchain_core.messages.base.BaseMessage]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\"},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"display_name\":\"Memory 
Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ConversationKGMemory\"},\"description\":\"Knowledge graph conversation 
memory.\",\"base_classes\":[\"BaseChatMemory\",\"ConversationKGMemory\",\"BaseMemory\"],\"display_name\":\"ConversationKGMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/kg\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ConversationSummaryMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"summary_message_cls\":{\"type\":\"Type[langchain_core.messages.base.BaseMessage]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"buffer\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":false,\"name\":\"buffer\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",
\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\"},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ConversationSummaryMemory\"},\"description\":\"Conversation summarizer to chat memory.\",\"base_classes\":[\"BaseChatMemory\",\"ConversationSummaryMemory\",\"BaseMemory\",\"SummarizerMixin\"],\"display_name\":\"ConversationSummaryMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/summary\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"MongoDBChatMessageHistory\":{\"template\":{\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"connection_string\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"MongoDB connection string (e.g 
mongodb://mongo_user:password123@mongo:27017)\"},\"database_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"database_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"session_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"MongoDBChatMessageHistory\"},\"description\":\"Memory store with MongoDB\",\"base_classes\":[\"MongoDBChatMessageHistory\",\"BaseChatMessageHistory\"],\"display_name\":\"MongoDBChatMessageHistory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/mongodb_chat_message_history\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"MotorheadMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"context\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"context\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{
\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\"},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"session_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"timeout\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"https://api.getmetal.io/v1/motorhead\",\"fileTypes\":[],\"password\":false,\"name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"MotorheadMemory\"},\"description\":\"Chat 
message memory backed by Motorhead service.\",\"base_classes\":[\"BaseChatMemory\",\"MotorheadMemory\",\"BaseMemory\"],\"display_name\":\"MotorheadMemory\",\"documentation\":\"https://python.langchain.com/docs/integrations/memory/motorhead_memory\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"PostgresChatMessageHistory\":{\"template\":{\"connection_string\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"postgresql://postgres:mypassword@localhost/chat_history\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"session_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"table_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"PostgresChatMessageHistory\"},\"description\":\"Memory store with 
Postgres\",\"base_classes\":[\"PostgresChatMessageHistory\",\"BaseChatMessageHistory\"],\"display_name\":\"PostgresChatMessageHistory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/postgres_chat_message_history\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VectorStoreRetrieverMemory\":{\"template\":{\"retriever\":{\"type\":\"VectorStoreRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"exclude_input_keys\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"exclude_input_keys\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\"},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"return_docs\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"return_docs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"VectorStoreRetrieverMemory\"},\"description\":\"VectorStoreRetriever-backed memory.\",\"base_classes\":[\"BaseMemory\",\"VectorStoreRetrieverMemory\"],\"display_name\":\"VectorStoreRetrieverMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/vectorstore_retriever_memory\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false}},\"tools\":{\"Calculator\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Calculator\"},\"description\":\"Useful for when you need to answer questions about 
math.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Calculator\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Search\":{\"template\":{\"aiosession\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"serpapi_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Search\"},\"description\":\"A search engine. Useful for when you need to answer questions about current events. Input should be a search query.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Search\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Tool\":{\"template\":{\"func\":{\"type\":\"Callable\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"func\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"return_direct\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"m
ultiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Tool\"},\"description\":\"Converts a chain, agent or function into a tool.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Tool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"PythonFunctionTool\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return text\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"return_direct\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"PythonFunctionTool\"},\"description\":\"Python function to be 
executed.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"PythonFunctionTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"PythonFunction\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return text\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"PythonFunction\"},\"description\":\"Python function to be executed.\",\"base_classes\":[\"Callable\"],\"display_name\":\"PythonFunction\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"JsonSpec\":{\"template\":{\"path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\",\".yaml\",\".yml\"],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_value_length\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_value_length\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"JsonSpec\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonSpec\"],\"display_name\":\"JsonSpec\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"BingSearchRun\":{\"template\":{\"api_wrapper\":{\"type\":\"BingSearchAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"d
ynamic\":false,\"info\":\"\"},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"BingSearchRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"BingSearchRun\"],\"display_name\":\"BingSearchRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"GoogleSearchResults\":{\"template\":{\"api_wrapper\":{\"type\":\"GoogleSearchAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"args_
schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"num_results\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":4,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GoogleSearchResults\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"GoogleSearchResults\",\"BaseTool\"],\"display_name\":\"GoogleSearchResults\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"GoogleSearchRun\":{\"template\":{\"api_wrapper\":{\"type\":\"GoogleSearchAPIWrappe
r\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GoogleSearchRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"GoogleSearchRun\"],\"display_name\":\"GoogleSearchRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"GoogleSerperRun\":{\"template\":{\"api_wrapper\":{\"type\":\"GoogleSerperAPIWrapper\",\"required\":true,\"placeholder
\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GoogleSerperRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"GoogleSerperRun\"],\"display_name\":\"GoogleSerperRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"InfoSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":fal
se,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"InfoSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"BaseTool\",\"InfoSQLDatabaseTool\"],\"display_name\":\"InfoSQLDatabaseTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"JsonGetValueTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multil
ine\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"spec\":{\"type\":\"JsonSpec\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"JsonGetValueTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"JsonGetValueTool\"],\"display_name\":\"JsonGetValueTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"JsonListKeysTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":
\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"spec\":{\"type\":\"JsonSpec\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"JsonListKeysTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonListKeysTool\",\"BaseTool\"],\"display_name\":\"JsonListKeysTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"ListSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"adva
nced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ListSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"ListSQLDatabaseTool\",\"BaseTool\"],\"display_name\":\"ListSQLDatabaseTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"QuerySQLDataBaseTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic
\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"QuerySQLDataBaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"BaseTool\",\"QuerySQLDataBaseTool\"],\"display_name\":\"QuerySQLDataBaseTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RequestsDeleteTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\
"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RequestsDeleteTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsDeleteTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsDeleteTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RequestsGetTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":
\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RequestsGetTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsGetTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsGetTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RequestsPatchTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"
},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RequestsPatchTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsPatchTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsPatchTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RequestsPostTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\
"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RequestsPostTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsPostTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsPostTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"RequestsPutTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},
\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"RequestsPutTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseRequestsTool\",\"BaseTool\",\"RequestsPutTool\"],\"display_name\":\"RequestsPutTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"WikipediaQueryRun\":{\"template\":{\"api_wrapper\":{\"type\":\"WikipediaAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"args_schema\":{\"t
ype\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"WikipediaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"WikipediaQueryRun\",\"BaseTool\"],\"display_name\":\"WikipediaQueryRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"WolframAlphaQueryRun\":{\"template\":{\"api_wrapper\":{\"type\":\"WolframAlphaAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"args_schema\":{\"type\":\"Type[pydantic.v1
.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"WolframAlphaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"WolframAlphaQueryRun\"],\"display_name\":\"WolframAlphaQueryRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false}},\"toolkits\":{\"JsonToolkit\":{\"template\":{\"spec\":{\"type\":\"JsonSpec\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"JsonToolkit\"},\"description\":\"Toolkit for interacting with a JSON 
spec.\",\"base_classes\":[\"JsonToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"JsonToolkit\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"OpenAPIToolkit\":{\"template\":{\"json_agent\":{\"type\":\"AgentExecutor\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"json_agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"OpenAPIToolkit\"},\"description\":\"Toolkit for interacting with an OpenAPI API.\",\"base_classes\":[\"OpenAPIToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"OpenAPIToolkit\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VectorStoreInfo\":{\"template\":{\"vectorstore\":{\"type\":\"VectorStore\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"vectorstore\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"VectorStoreInfo\"},\"description\":\"Information about a 
VectorStore.\",\"base_classes\":[\"VectorStoreInfo\"],\"display_name\":\"VectorStoreInfo\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VectorStoreRouterToolkit\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"vectorstores\":{\"type\":\"VectorStoreInfo\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"vectorstores\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"VectorStoreRouterToolkit\"},\"description\":\"Toolkit for routing between Vector Stores.\",\"base_classes\":[\"VectorStoreRouterToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"VectorStoreRouterToolkit\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VectorStoreToolkit\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"vectorstore_info\":{\"type\":\"VectorStoreInfo\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"vectorstore_info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"VectorStoreToolkit\"},\"description\":\"Toolkit for interacting with a Vector 
Store.\",\"base_classes\":[\"VectorStoreToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"VectorStoreToolkit\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Metaphor\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Union\\n\\nfrom langchain.agents import tool\\nfrom langchain.agents.agent_toolkits.base import BaseToolkit\\nfrom langchain.tools import Tool\\nfrom metaphor_python import Metaphor # type: ignore\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass MetaphorToolkit(CustomComponent):\\n display_name: str = \\\"Metaphor\\\"\\n description: str = \\\"Metaphor Toolkit\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/tools/metaphor_search\\\"\\n beta: bool = True\\n # api key should be password = True\\n field_config = {\\n \\\"metaphor_api_key\\\": {\\\"display_name\\\": \\\"Metaphor API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n metaphor_api_key: str,\\n use_autoprompt: bool = True,\\n search_num_results: int = 5,\\n similar_num_results: int = 5,\\n ) -> Union[Tool, BaseToolkit]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n client = Metaphor(api_key=metaphor_api_key)\\n\\n @tool\\n def search(query: str):\\n \\\"\\\"\\\"Call search engine with a query.\\\"\\\"\\\"\\n return client.search(query, use_autoprompt=use_autoprompt, num_results=search_num_results)\\n\\n @tool\\n def get_contents(ids: List[str]):\\n \\\"\\\"\\\"Get contents of a webpage.\\n\\n The ids passed in should be a list of ids as fetched from `search`.\\n \\\"\\\"\\\"\\n return client.get_contents(ids)\\n\\n @tool\\n def find_similar(url: str):\\n \\\"\\\"\\\"Get search results similar to a given URL.\\n\\n The url passed in should be a URL returned from `search`\\n 
\\\"\\\"\\\"\\n return client.find_similar(url, num_results=similar_num_results)\\n\\n return [search, get_contents, find_similar] # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\"},\"metaphor_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"metaphor_api_key\",\"display_name\":\"Metaphor API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_num_results\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"search_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"similar_num_results\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"similar_num_results\",\"display_name\":\"similar_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"use_autoprompt\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"use_autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Metaphor 
Toolkit\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseToolkit\"],\"display_name\":\"Metaphor\",\"documentation\":\"https://python.langchain.com/docs/integrations/tools/metaphor_search\",\"custom_fields\":{\"metaphor_api_key\":null,\"search_num_results\":null,\"similar_num_results\":null,\"use_autoprompt\":null},\"output_types\":[\"Metaphor\"],\"field_formatters\":{},\"beta\":true}},\"wrappers\":{\"TextRequestsWrapper\":{\"template\":{\"aiosession\":{\"type\":\"ClientSession\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"auth\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"auth\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"fileTypes\":[],\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"TextRequestsWrapper\"},\"description\":\"Lightweight wrapper around requests 
library.\",\"base_classes\":[\"TextRequestsWrapper\"],\"display_name\":\"TextRequestsWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SQLDatabase\":{\"template\":{\"database_uri\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"database_uri\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"engine_args\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"engine_args\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SQLDatabase\"},\"description\":\"Construct a SQLAlchemy engine from URI.\",\"base_classes\":[\"SQLDatabase\",\"Callable\"],\"display_name\":\"SQLDatabase\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false}},\"embeddings\":{\"OpenAIEmbeddings\":{\"template\":{\"allowed_special\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"chunk_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"def
ault_headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"default_query\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"default_query\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"deployment\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"fileTypes\":[],\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"disallowed_special\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"all\",\"fileTypes\":[],\"password\":false,\"name\":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"embedding_ctx_length\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":8191,\"fileTypes\":[],\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"headers\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer 
\\\"}\",\"fileTypes\":[],\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"http_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"http_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":2,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"fileTypes\":[],\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"openai_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"openai_api_type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API 
Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"openai_api_version\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"openai_organization\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"openai_proxy\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"retry_max_seconds\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":20,\"fileTypes\":[],\"password\":false,\"name\":\"retry_max_seconds\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"retry_min_seconds\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":4,\"fileTypes\":[],\"password\":false,\"name\":\"retry_min_seconds\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"show_progress_bar\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"skip_em
pty\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"tiktoken_enabled\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"password\":true,\"name\":\"tiktoken_enabled\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tiktoken_model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"CohereEmbeddings\":{\"template\":{\"async_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"cohere_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"
password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"embed-english-v2.0\",\"fileTypes\":[],\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"truncate\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"truncate\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"user_agent\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"fileTypes\":[],\"password\":false,\"name\":\"user_agent\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CohereEmbeddings\"},\"description\":\"Cohere embedding 
models.\",\"base_classes\":[\"Embeddings\",\"CohereEmbeddings\"],\"display_name\":\"CohereEmbeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"HuggingFaceEmbeddings\":{\"template\":{\"cache_folder\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"cache_folder\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"encode_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"encode_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"sentence-transformers/all-mpnet-base-v2\",\"fileTypes\":[],\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"multi_process\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"multi_process\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"HuggingFaceEmbeddings\"},\"description\":\"HuggingFace sentence_transformers embedding 
models.\",\"base_classes\":[\"Embeddings\",\"HuggingFaceEmbeddings\"],\"display_name\":\"HuggingFaceEmbeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"VertexAIEmbeddings\":{\"template\":{\"client\":{\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"client_preview\":{\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client_preview\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"location\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"password\":false,\"name\":\"location\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"max_output_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"password\":true,\"name\":\"max_output_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"model_name\":{\"type\":\"str\",\"required\":false,\"placehold
er\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"textembedding-gecko\",\"fileTypes\":[],\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"password\":false,\"name\":\"n\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"project\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"project\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"request_parallelism\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"password\":false,\"name\":\"request_parallelism\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\
",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"_type\":\"VertexAIEmbeddings\"},\"description\":\"Google Cloud VertexAI embedding models.\",\"base_classes\":[\"_VertexAIBase\",\"_VertexAICommon\",\"Embeddings\",\"VertexAIEmbeddings\"],\"display_name\":\"VertexAIEmbeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/google_vertex_ai_palm\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"AmazonBedrockEmbeddings\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n 
credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"endpoint_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"model_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"region_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Embeddings model from Amazon 
Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"field_formatters\":{},\"beta\":true}},\"vectorstores\":{\"FAISS\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"folder_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"folder_path\",\"display_name\":\"Local Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ids\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"index_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadatas\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"FAISS\"},\"description\":\"Construct FAISS wrapper from raw documents.\",\"base_classes\":[\"VectorStore\",\"FAISS\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"FAISS\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"MongoDBAtlasVectorSearch\":{\"template\":{\"collection\":{\"type\":\"Collection[MongoDBDocumentType]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"collection\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"collection_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\"
:false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"db_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db_name\",\"display_name\":\"Database Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"index_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadatas\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"mongodb_atlas_cluster_uri\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mongodb_atlas_cluster_uri\",\"display_name\":\"MongoDB Atlas Cluster URI\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"MongoDBAtlasVectorSearch\"},\"description\":\"Construct a `MongoDB Atlas Vector Search` vector store from raw documents.\",\"base_classes\":[\"VectorStore\",\"MongoDBAtlasVectorSearch\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"MongoDB 
Atlas\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Pinecone\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"batch_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":32,\"fileTypes\":[],\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ids\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"index_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"index_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadatas\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"namespace\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"namespace\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"pinecone_api_key\":{\"type\":\"str\",\"requ
ired\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"pinecone_api_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"pinecone_env\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"pinecone_env\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"pool_threads\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":4,\"fileTypes\":[],\"password\":false,\"name\":\"pool_threads\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"text_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"text\",\"fileTypes\":[],\"password\":true,\"name\":\"text_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"upsert_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"upsert_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Pinecone\"},\"description\":\"Construct Pinecone wrapper from raw 
documents.\",\"base_classes\":[\"VectorStore\",\"Pinecone\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Pinecone\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/pinecone\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Qdrant\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"hnsw_config\":{\"type\":\"common_types.HnswConfigDiff\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"hnsw_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"init_from\":{\"type\":\"common_types.InitFrom\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"init_from\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"optimizers_config\":{\"type\":\"common_types.OptimizersConfigDiff\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"optimizers_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"quantization_config\":{\"type\":\"common_types.QuantizationConfig\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"quantization_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"wal_config\":{\"type\":\"common_ty
pes.WalConfigDiff\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"wal_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"batch_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":64,\"fileTypes\":[],\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"collection_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"content_payload_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"page_content\",\"fileTypes\":[],\"password\":false,\"name\":\"content_payload_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"distance_func\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"Cosine\",\"fileTypes\":[],\"password\":false,\"name\":\"distance_func\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"force_recreate\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"force_recreate\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"grpc_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6334,\"fileTypes\":[],\"password\":false,\"name\":\"grpc_port\",\"advanced\":true,\"dynamic\":false,\
"info\":\"\"},\"host\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"https\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"https\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"ids\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"location\":{\"type\":\"str\",\"required\":false,\"placeholder\":\":memory:\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\":memory:\",\"fileTypes\":[],\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata_payload_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"metadata\",\"fileTypes\":[],\"password\":false,\"name\":\"metadata_payload_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"metadatas\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"on_disk\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"on_disk\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"on_disk_payload\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"on_disk_payload\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\"
:false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6333,\"fileTypes\":[],\"password\":false,\"name\":\"port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"prefer_grpc\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"prefer_grpc\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"prefix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"replication_factor\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"replication_factor\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"shard_number\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"shard_number\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1}},\"url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show
\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"url\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"vector_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"vector_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"write_consistency_factor\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"write_consistency_factor\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Qdrant\"},\"description\":\"Construct Qdrant wrapper from a list of texts.\",\"base_classes\":[\"VectorStore\",\"Qdrant\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Qdrant\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SupabaseVectorStore\":{\"template\":{\"client\":{\"type\":\"supabase.client.Client\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chunk_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":500,\"fileType
s\":[],\"password\":false,\"name\":\"chunk_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"ids\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadatas\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"query_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"query_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"supabase_service_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"supabase_service_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"supabase_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"supabase_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"table_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SupabaseVectorStore\"},\"description\":\"Return 
VectorStore initialized from texts and embeddings.\",\"base_classes\":[\"VectorStore\",\"SupabaseVectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Supabase\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Weaviate\":{\"template\":{\"client\":{\"type\":\"weaviate.Client\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"relevance_score_fn\":{\"type\":\"Callable[[float], 
float]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"password\":false,\"name\":\"relevance_score_fn\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"batch_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"by_text\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"by_text\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client_kwargs\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"client_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"index_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"index_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadatas\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"text_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"text\",\"fileTypes\":[],\"password\":true,\"name\":\"text_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"weaviate_ap
i_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"weaviate_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"weaviate_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"http://localhost:8080\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"http://localhost:8080\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"weaviate_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"Weaviate\"},\"description\":\"Construct Weaviate wrapper from raw documents.\",\"base_classes\":[\"VectorStore\",\"Weaviate\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Weaviate\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"Redis\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores.redis import Redis\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.embeddings.base import Embeddings\\n\\n\\nclass RedisComponent(CustomComponent):\\n 
\\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Redis.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Redis\\\"\\n description: str = \\\"Implementation of Vector Store using Redis\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/redis\\\"\\n beta = True\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"index_name\\\": {\\\"display_name\\\": \\\"Index Name\\\", \\\"value\\\": \\\"your_index\\\"},\\n \\\"code\\\": {\\\"show\\\": False, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"redis_server_url\\\": {\\n \\\"display_name\\\": \\\"Redis Server Connection String\\\",\\n \\\"advanced\\\": False,\\n },\\n \\\"redis_index_name\\\": {\\\"display_name\\\": \\\"Redis Index\\\", \\\"advanced\\\": False},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n redis_server_url: str,\\n redis_index_name: str,\\n documents: Optional[Document] = None,\\n ) -> VectorStore:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - embedding (Embeddings): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - redis_index_name (str): The name of the Redis index.\\n - redis_server_url (str): The URL for the Redis server.\\n\\n Returns:\\n - VectorStore: The Vector Store object.\\n \\\"\\\"\\\"\\n\\n return Redis.from_documents(\\n documents=documents, # type: ignore\\n embedding=embedding,\\n redis_url=redis_server_url,\\n index_name=redis_index_name,\\n 
)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"redis_index_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"redis_index_name\",\"display_name\":\"Redis Index\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"redis_server_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"redis_server_url\",\"display_name\":\"Redis Server Connection String\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using Redis\",\"base_classes\":[\"VectorStore\"],\"display_name\":\"Redis\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/redis\",\"custom_fields\":{\"documents\":null,\"embedding\":null,\"redis_index_name\":null,\"redis_server_url\":null},\"output_types\":[\"Redis\"],\"field_formatters\":{},\"beta\":true},\"Chroma\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chroma_server_cors_allow_origins\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_cors_a
llow_origins\",\"display_name\":\"Server CORS Allow Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"chroma_server_grpc_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Server gRPC Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"chroma_server_host\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"chroma_server_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_port\",\"display_name\":\"Server Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"chroma_server_ssl_enabled\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores import Chroma\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.schema import BaseRetriever\\nfrom langchain.embeddings.base import Embeddings\\nimport chromadb # type: ignore\\n\\n\\nclass ChromaComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Chroma.\\n \\\"\\\"\\\"\\n\\n 
display_name: str = \\\"Chroma\\\"\\n description: str = \\\"Implementation of Vector Store using Chroma\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/chroma\\\"\\n beta: bool = True\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\", \\\"value\\\": \\\"langflow\\\"},\\n \\\"persist\\\": {\\\"display_name\\\": \\\"Persist\\\"},\\n \\\"persist_directory\\\": {\\\"display_name\\\": \\\"Persist Directory\\\"},\\n \\\"code\\\": {\\\"show\\\": False, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"chroma_server_cors_allow_origins\\\": {\\n \\\"display_name\\\": \\\"Server CORS Allow Origins\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_host\\\": {\\\"display_name\\\": \\\"Server Host\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_port\\\": {\\\"display_name\\\": \\\"Server Port\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_grpc_port\\\": {\\n \\\"display_name\\\": \\\"Server gRPC Port\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_ssl_enabled\\\": {\\n \\\"display_name\\\": \\\"Server SSL Enabled\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n collection_name: str,\\n persist: bool,\\n chroma_server_ssl_enabled: bool,\\n persist_directory: Optional[str] = None,\\n embedding: Optional[Embeddings] = None,\\n documents: Optional[Document] = None,\\n chroma_server_cors_allow_origins: Optional[str] = None,\\n chroma_server_host: Optional[str] = None,\\n chroma_server_port: Optional[int] = None,\\n chroma_server_grpc_port: Optional[int] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n 
\\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - collection_name (str): The name of the collection.\\n - persist_directory (Optional[str]): The directory to persist the Vector Store to.\\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\\n - persist (bool): Whether to persist the Vector Store or not.\\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\\n - chroma_server_host (Optional[str]): The host for the Chroma server.\\n - chroma_server_port (Optional[int]): The port for the Chroma server.\\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\\n\\n Returns:\\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\\n \\\"\\\"\\\"\\n\\n # Chroma settings\\n chroma_settings = None\\n\\n if chroma_server_host is not None:\\n chroma_settings = chromadb.config.Settings(\\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,\\n chroma_server_host=chroma_server_host,\\n chroma_server_port=chroma_server_port or None,\\n chroma_server_grpc_port=chroma_server_grpc_port or None,\\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\\n )\\n\\n # If documents, then we need to create a Chroma instance using .from_documents\\n if documents is not None and embedding is not None:\\n return Chroma.from_documents(\\n documents=documents, # type: ignore\\n persist_directory=persist_directory if persist else None,\\n collection_name=collection_name,\\n embedding=embedding,\\n client_settings=chroma_settings,\\n )\\n\\n return Chroma(persist_directory=persist_directory, 
client_settings=chroma_settings)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"langflow\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"persist\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"persist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"persist_directory\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"persist_directory\",\"display_name\":\"Persist Directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using 
Chroma\",\"base_classes\":[\"VectorStore\",\"BaseRetriever\"],\"display_name\":\"Chroma\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/chroma\",\"custom_fields\":{\"chroma_server_cors_allow_origins\":null,\"chroma_server_grpc_port\":null,\"chroma_server_host\":null,\"chroma_server_port\":null,\"chroma_server_ssl_enabled\":null,\"collection_name\":null,\"documents\":null,\"embedding\":null,\"persist\":null,\"persist_directory\":null},\"output_types\":[\"Chroma\"],\"field_formatters\":{},\"beta\":true},\"pgvector\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, List\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores.pgvector import PGVector\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.embeddings.base import Embeddings\\n\\n\\nclass PostgresqlVectorComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using PostgreSQL.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"PGVector\\\"\\n description: str = \\\"Implementation of Vector Store using PostgreSQL\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/pgvector\\\"\\n beta = True\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n Builds 
the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"index_name\\\": {\\\"display_name\\\": \\\"Index Name\\\", \\\"value\\\": \\\"your_index\\\"},\\n \\\"code\\\": {\\\"show\\\": True, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"pg_server_url\\\": {\\n \\\"display_name\\\": \\\"PostgreSQL Server Connection String\\\",\\n \\\"advanced\\\": False,\\n },\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Table\\\", \\\"advanced\\\": False},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n pg_server_url: str,\\n collection_name: str,\\n documents: Optional[List[Document]] = None,\\n ) -> VectorStore:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - embedding (Embeddings): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - collection_name (str): The name of the PG table.\\n - pg_server_url (str): The URL for the PG server.\\n\\n Returns:\\n - VectorStore: The Vector Store object.\\n \\\"\\\"\\\"\\n\\n try:\\n if documents is None:\\n return PGVector.from_existing_index(\\n embedding=embedding,\\n collection_name=collection_name,\\n connection_string=pg_server_url,\\n )\\n\\n return PGVector.from_documents(\\n embedding=embedding,\\n documents=documents,\\n collection_name=collection_name,\\n connection_string=pg_server_url,\\n )\\n except Exception as e:\\n raise RuntimeError(f\\\"Failed to build PGVector: 
{e}\\\")\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Table\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"pg_server_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"pg_server_url\",\"display_name\":\"PostgreSQL Server Connection String\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using PostgreSQL\",\"base_classes\":[],\"display_name\":\"PGVector\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/pgvector\",\"custom_fields\":{\"collection_name\":null,\"documents\":null,\"embedding\":null,\"pg_server_url\":null},\"output_types\":[\"pgvector\"],\"field_formatters\":{},\"beta\":true},\"Vectara\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nfrom langchain.schema import BaseRetriever, Document\\nfrom langchain.vectorstores import Vectara\\nfrom langchain.vectorstores.base import VectorStore\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass VectaraComponent(CustomComponent):\\n display_name: str = \\\"Vectara\\\"\\n description: str = \\\"Implementation of Vector Store using 
Vectara\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/vectara\\\"\\n beta = True\\n # api key should be password = True\\n field_config = {\\n \\\"vectara_customer_id\\\": {\\\"display_name\\\": \\\"Vectara Customer ID\\\"},\\n \\\"vectara_corpus_id\\\": {\\\"display_name\\\": \\\"Vectara Corpus ID\\\"},\\n \\\"vectara_api_key\\\": {\\\"display_name\\\": \\\"Vectara API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n }\\n\\n def build(\\n self,\\n vectara_customer_id: str,\\n vectara_corpus_id: str,\\n vectara_api_key: str,\\n documents: Optional[Document] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n if documents is not None:\\n return Vectara.from_documents(\\n documents=documents, # type: ignore\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n source=\\\"langflow\\\",\\n )\\n\\n return Vectara(\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n source=\\\"langflow\\\",\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"vectara_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"vectara_api_key\",\"display_name\":\"Vectara API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"vectara_corpus_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectara_corpus_id\",\"display_name\":\"Vectara Corpus 
ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"vectara_customer_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectara_customer_id\",\"display_name\":\"Vectara Customer ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using Vectara\",\"base_classes\":[\"VectorStore\",\"BaseRetriever\"],\"display_name\":\"Vectara\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/vectara\",\"custom_fields\":{\"documents\":null,\"vectara_api_key\":null,\"vectara_corpus_id\":null,\"vectara_customer_id\":null},\"output_types\":[\"Vectara\"],\"field_formatters\":{},\"beta\":true}},\"documentloaders\":{\"AZLyricsLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"AZLyricsLoader\"},\"description\":\"Load `AZLyrics` 
webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"AZLyricsLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"AirbyteJSONLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"BSHTMLLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".html\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"BSHTMLLoader\"},\"description\":\"Load `HTML` files and parse them with `beautiful 
soup`.\",\"base_classes\":[\"Document\"],\"display_name\":\"BSHTMLLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"CSVLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".csv\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"CoNLLULoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".csv\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CoNLLULoader\"},\"description\":\"Load `CoNLL-U` 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"CoNLLULoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"CollegeConfidentialLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CollegeConfidentialLoader\"},\"description\":\"Load `College Confidential` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"CollegeConfidentialLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"DirectoryLoader\":{\"template\":{\"glob\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"**/*.txt\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"glob\",\"display_name\":\"glob\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"load_hidden\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"False\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"load_hidden\",\"display_name\":\"Load hidden 
files\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"max_concurrency\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_concurrency\",\"display_name\":\"Max concurrency\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"recursive\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"True\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"recursive\",\"display_name\":\"Recursive\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"silent_errors\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"False\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"silent_errors\",\"display_name\":\"Silent errors\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"use_multithreading\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"True\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_multithreading\",\"display_name\":\"Use multithreading\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"DirectoryLoader\"},\"description\":\"Load from a 
directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"DirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/file_directory\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"EverNoteLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".xml\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"EverNoteLoader\"},\"description\":\"Load from `EverNote`.\",\"base_classes\":[\"Document\"],\"display_name\":\"EverNoteLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"FacebookChatLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"FacebookChatLoader\"},\"description\":\"Load `Facebook Chat` messages directory 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"FacebookChatLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"GitLoader\":{\"template\":{\"branch\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"branch\",\"display_name\":\"Branch\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"clone_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"clone_url\",\"display_name\":\"Clone URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"file_filter\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"file_filter\",\"display_name\":\"File extensions (comma-separated)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"repo_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repo_path\",\"display_name\":\"Path to repository\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GitLoader\"},\"description\":\"Load `Git` repository 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/git\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"GitbookLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_page\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_page\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GitbookLoader\"},\"description\":\"Load `GitBook` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitbookLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gitbook\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"GutenbergLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GutenbergLoader\"},\"description\":\"Load from 
`Gutenberg.org`.\",\"base_classes\":[\"Document\"],\"display_name\":\"GutenbergLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gutenberg\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"HNLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"HNLoader\"},\"description\":\"Load `Hacker News` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"HNLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/hacker_news\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"IFixitLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"IFixitLoader\"},\"description\":\"Load `iFixit` repair guides, device wikis and 
answers.\",\"base_classes\":[\"Document\"],\"display_name\":\"IFixitLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"IMSDbLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"IMSDbLoader\"},\"description\":\"Load `IMSDb` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"IMSDbLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/imsdb\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"NotionDirectoryLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"NotionDirectoryLoader\"},\"description\":\"Load `Notion directory` 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"NotionDirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/notion\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"PyPDFLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".pdf\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"PyPDFLoader\"},\"description\":\"Load PDF using pypdf into list of documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"PyPDFDirectoryLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"PyPDFDirectoryLoader\"},\"description\":\"Load a directory with `PDF` files using `pypdf` and chunks at character 
level.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFDirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"ReadTheDocsLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ReadTheDocsLoader\"},\"description\":\"Load `ReadTheDocs` documentation directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"ReadTheDocsLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/readthedocs_documentation\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"SRTLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".srt\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SRTLoader\"},\"description\":\"Load `.srt` (subtitle) 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"SRTLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"SlackDirectoryLoader\":{\"template\":{\"zip_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".zip\"],\"file_path\":\"\",\"password\":false,\"name\":\"zip_path\",\"display_name\":\"Path to zip file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"workspace_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"workspace_url\",\"display_name\":\"Workspace URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SlackDirectoryLoader\"},\"description\":\"Load from a `Slack` directory 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"SlackDirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"TextLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".txt\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"TextLoader\"},\"description\":\"Load text file.\",\"base_classes\":[\"Document\"],\"display_name\":\"TextLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"UnstructuredEmailLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".eml\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"UnstructuredEmailLoader\"},\"description\":\"Load email files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredEmailLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/email\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"UnstructuredHTMLLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".html\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"UnstructuredHTMLLoader\"},\"description\":\"Load `HTML` files using `Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredHTMLLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"UnstructuredMarkdownLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".md\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"UnstructuredMarkdownLoader\"},\"description\":\"Load `Markdown` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredMarkdownLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/markdown\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"UnstructuredPowerPointLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".pptx\",\".ppt\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"UnstructuredPowerPointLoader\"},\"description\":\"Load `Microsoft PowerPoint` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredPowerPointLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_powerpoint\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"UnstructuredWordDocumentLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".docx\",\".doc\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"UnstructuredWordDocumentLoader\"},\"description\":\"Load `Microsoft Word` file using `Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredWordDocumentLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"WebBaseLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web 
Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"WebBaseLoader\"},\"description\":\"Load HTML pages using `urllib` and parse them with `BeautifulSoup'.\",\"base_classes\":[\"Document\"],\"display_name\":\"WebBaseLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"FileLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\"json\",\"txt\",\"csv\",\"jsonl\",\"html\",\"htm\",\"conllu\",\"enex\",\"msg\",\"pdf\",\"srt\",\"eml\",\"md\",\"pptx\",\"docx\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"display_name\":\"File Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain.schema import Document\\n\\nfrom langflow import CustomComponent\\nfrom langflow.utils.constants import LOADERS_INFO\\n\\n\\nclass FileLoaderComponent(CustomComponent):\\n display_name: str = \\\"File Loader\\\"\\n description: str = \\\"Generic File Loader\\\"\\n beta = True\\n\\n def build_config(self):\\n loader_options = [\\\"Automatic\\\"] + [loader_info[\\\"name\\\"] for loader_info in LOADERS_INFO]\\n\\n file_types = []\\n suffixes = []\\n\\n for loader_info in LOADERS_INFO:\\n if \\\"allowedTypes\\\" in loader_info:\\n file_types.extend(loader_info[\\\"allowedTypes\\\"])\\n suffixes.extend([f\\\".{ext}\\\" for ext in loader_info[\\\"allowedTypes\\\"]])\\n\\n return {\\n \\\"file_path\\\": {\\n \\\"display_name\\\": \\\"File Path\\\",\\n \\\"required\\\": True,\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\n \\\"json\\\",\\n \\\"txt\\\",\\n \\\"csv\\\",\\n \\\"jsonl\\\",\\n \\\"html\\\",\\n \\\"htm\\\",\\n \\\"conllu\\\",\\n 
\\\"enex\\\",\\n \\\"msg\\\",\\n \\\"pdf\\\",\\n \\\"srt\\\",\\n \\\"eml\\\",\\n \\\"md\\\",\\n \\\"pptx\\\",\\n \\\"docx\\\",\\n ],\\n \\\"suffixes\\\": [\\n \\\".json\\\",\\n \\\".txt\\\",\\n \\\".csv\\\",\\n \\\".jsonl\\\",\\n \\\".html\\\",\\n \\\".htm\\\",\\n \\\".conllu\\\",\\n \\\".enex\\\",\\n \\\".msg\\\",\\n \\\".pdf\\\",\\n \\\".srt\\\",\\n \\\".eml\\\",\\n \\\".md\\\",\\n \\\".pptx\\\",\\n \\\".docx\\\",\\n ],\\n # \\\"file_types\\\" : file_types,\\n # \\\"suffixes\\\": suffixes,\\n },\\n \\\"loader\\\": {\\n \\\"display_name\\\": \\\"Loader\\\",\\n \\\"is_list\\\": True,\\n \\\"required\\\": True,\\n \\\"options\\\": loader_options,\\n \\\"value\\\": \\\"Automatic\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, file_path: str, loader: str) -> Document:\\n file_type = file_path.split(\\\".\\\")[-1]\\n\\n # Mapeie o nome do loader selecionado para suas informações\\n selected_loader_info = None\\n for loader_info in LOADERS_INFO:\\n if loader_info[\\\"name\\\"] == loader:\\n selected_loader_info = loader_info\\n break\\n\\n if selected_loader_info is None and loader != \\\"Automatic\\\":\\n raise ValueError(f\\\"Loader {loader} not found in the loader info list\\\")\\n\\n if loader == \\\"Automatic\\\":\\n # Determine o loader automaticamente com base na extensão do arquivo\\n default_loader_info = None\\n for info in LOADERS_INFO:\\n if \\\"defaultFor\\\" in info and file_type in info[\\\"defaultFor\\\"]:\\n default_loader_info = info\\n break\\n\\n if default_loader_info is None:\\n raise ValueError(f\\\"No default loader found for file type: {file_type}\\\")\\n\\n selected_loader_info = default_loader_info\\n if isinstance(selected_loader_info, dict):\\n loader_import: str = selected_loader_info[\\\"import\\\"]\\n else:\\n raise ValueError(f\\\"Loader info for {loader} is not a dict\\\\nLoader info:\\\\n{selected_loader_info}\\\")\\n module_name, class_name = loader_import.rsplit(\\\".\\\", 1)\\n\\n try:\\n # Importe o 
loader dinamicamente\\n loader_module = __import__(module_name, fromlist=[class_name])\\n loader_instance = getattr(loader_module, class_name)\\n except ImportError as e:\\n raise ValueError(f\\\"Loader {loader} could not be imported\\\\nLoader info:\\\\n{selected_loader_info}\\\") from e\\n\\n result = loader_instance(file_path=file_path)\\n return result.load()\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"loader\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Automatic\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Automatic\",\"Airbyte JSON (.jsonl)\",\"JSON (.json)\",\"BeautifulSoup4 HTML (.html, .htm)\",\"CSV (.csv)\",\"CoNLL-U (.conllu)\",\"EverNote (.enex)\",\"Facebook Chat (.json)\",\"Outlook Message (.msg)\",\"PyPDF (.pdf)\",\"Subtitle (.str)\",\"Text (.txt)\",\"Unstructured Email (.eml)\",\"Unstructured HTML (.html, .htm)\",\"Unstructured Markdown (.md)\",\"Unstructured PowerPoint (.pptx)\",\"Unstructured Word (.docx)\"],\"name\":\"loader\",\"display_name\":\"Loader\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Generic File Loader\",\"base_classes\":[\"Document\"],\"display_name\":\"File Loader\",\"documentation\":\"\",\"custom_fields\":{\"file_path\":null,\"loader\":null},\"output_types\":[\"FileLoader\"],\"field_formatters\":{},\"beta\":true},\"UrlLoader\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List\\n\\nfrom langchain import document_loaders\\nfrom langchain.schema import Document\\nfrom langflow import CustomComponent\\n\\n\\nclass UrlLoaderComponent(CustomComponent):\\n display_name: str = \\\"Url Loader\\\"\\n description: str = \\\"Generic Url Loader Component\\\"\\n\\n def 
build_config(self):\\n return {\\n \\\"web_path\\\": {\\n \\\"display_name\\\": \\\"Url\\\",\\n \\\"required\\\": True,\\n },\\n \\\"loader\\\": {\\n \\\"display_name\\\": \\\"Loader\\\",\\n \\\"is_list\\\": True,\\n \\\"required\\\": True,\\n \\\"options\\\": [\\n \\\"AZLyricsLoader\\\",\\n \\\"CollegeConfidentialLoader\\\",\\n \\\"GitbookLoader\\\",\\n \\\"HNLoader\\\",\\n \\\"IFixitLoader\\\",\\n \\\"IMSDbLoader\\\",\\n \\\"WebBaseLoader\\\",\\n ],\\n \\\"value\\\": \\\"WebBaseLoader\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, web_path: str, loader: str) -> List[Document]:\\n try:\\n loader_instance = getattr(document_loaders, loader)(web_path=web_path)\\n except Exception as e:\\n raise ValueError(f\\\"No loader found for: {web_path}\\\") from e\\n docs = loader_instance.load()\\n avg_length = sum(len(doc.page_content) for doc in docs if hasattr(doc, \\\"page_content\\\")) / len(docs)\\n self.status = f\\\"\\\"\\\"{len(docs)} documents)\\n \\\\nAvg. 
Document Length (characters): {int(avg_length)}\\n Documents: {docs[:3]}...\\\"\\\"\\\"\\n return docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"loader\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"WebBaseLoader\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"AZLyricsLoader\",\"CollegeConfidentialLoader\",\"GitbookLoader\",\"HNLoader\",\"IFixitLoader\",\"IMSDbLoader\",\"WebBaseLoader\"],\"name\":\"loader\",\"display_name\":\"Loader\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Generic Url Loader Component\",\"base_classes\":[\"Document\"],\"display_name\":\"Url Loader\",\"documentation\":\"\",\"custom_fields\":{\"loader\":null,\"web_path\":null},\"output_types\":[\"UrlLoader\"],\"field_formatters\":{},\"beta\":true}},\"textsplitters\":{\"CharacterTextSplitter\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chunk_overlap\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":200,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk 
Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"chunk_size\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"separator\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\\\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"separator\",\"display_name\":\"Separator\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CharacterTextSplitter\"},\"description\":\"Splitting text that looks at characters.\",\"base_classes\":[\"Document\"],\"display_name\":\"CharacterTextSplitter\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"beta\":false},\"LanguageRecursiveTextSplitter\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\"},\"chunk_overlap\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":200,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between 
chunks.\"},\"chunk_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.text_splitter import Language\\nfrom langchain.schema import Document\\n\\n\\nclass LanguageRecursiveTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Language Recursive Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length based on language.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter\\\"\\n\\n def build_config(self):\\n options = [x.value for x in Language]\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separator_type\\\": {\\n \\\"display_name\\\": \\\"Separator Type\\\",\\n \\\"info\\\": \\\"The type of separator to use.\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"options\\\": options,\\n \\\"value\\\": \\\"Python\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": \\\"The characters to split on.\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n \\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": 
{\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n separator_type: Optional[str] = \\\"Python\\\",\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n\\n splitter = RecursiveCharacterTextSplitter.from_language(\\n language=Language(separator_type),\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n return docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"separator_type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"value\":\"Python\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"cpp\",\"go\",\"java\",\"kotlin\",\"js\",\"ts\",\"php\",\"proto\",\"python\",\"rst\",\"ruby\",\"rust\",\"scala\",\"swift\",\"markdown\",\"latex\",\"html\",\"sol\",\"csharp\",\"cobol\"],\"name\":\"separator_type\",\"display_name\":\"Separator Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"The type of separator to use.\"},\"_type\":\"CustomComponent\"},\"description\":\"Split text into chunks of a specified length based on language.\",\"base_classes\":[\"Document\"],\"display_name\":\"Language Recursive Text 
Splitter\",\"documentation\":\"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter\",\"custom_fields\":{\"chunk_overlap\":null,\"chunk_size\":null,\"documents\":null,\"separator_type\":null},\"output_types\":[\"LanguageRecursiveTextSplitter\"],\"field_formatters\":{},\"beta\":true},\"RecursiveCharacterTextSplitter\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\"},\"chunk_overlap\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":200,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between chunks.\"},\"chunk_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.utils.util import build_loader_repr_from_documents\\n\\n\\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Recursive Character Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\\\"\\n\\n def 
build_config(self):\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": 'The characters to split on.\\\\nIf left empty defaults to [\\\"\\\\\\\\n\\\\\\\\n\\\", \\\"\\\\\\\\n\\\", \\\" \\\", \\\"\\\"].',\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n \\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n separators: Optional[list[str]] = None,\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n if separators == \\\"\\\":\\n separators = None\\n elif separators:\\n # check if the separators list has escaped characters\\n # if there are escaped characters, unescape them\\n separators = [x.encode().decode(\\\"unicode-escape\\\") for x in separators]\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n splitter = 
RecursiveCharacterTextSplitter(\\n separators=separators,\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n self.repr_value = build_loader_repr_from_documents(docs)\\n return docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"separators\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"separators\",\"display_name\":\"Separators\",\"advanced\":false,\"dynamic\":false,\"info\":\"The characters to split on.\\nIf left empty defaults to [\\\"\\\\n\\\\n\\\", \\\"\\\\n\\\", \\\" \\\", \\\"\\\"].\"},\"_type\":\"CustomComponent\"},\"description\":\"Split text into chunks of a specified length.\",\"base_classes\":[\"Document\"],\"display_name\":\"Recursive Character Text Splitter\",\"documentation\":\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\",\"custom_fields\":{\"chunk_overlap\":null,\"chunk_size\":null,\"documents\":null,\"separators\":null},\"output_types\":[\"RecursiveCharacterTextSplitter\"],\"field_formatters\":{},\"beta\":true}},\"utilities\":{\"BingSearchAPIWrapper\":{\"template\":{\"bing_search_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"bing_search_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"bing_subscription_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"bing_subscription_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"advanced\":
false,\"dynamic\":false,\"info\":\"\"},\"search_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"},\"_type\":\"BingSearchAPIWrapper\"},\"description\":\"Wrapper for Bing Search API.\",\"base_classes\":[\"BingSearchAPIWrapper\"],\"display_name\":\"BingSearchAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"GoogleSearchAPIWrapper\":{\"template\":{\"google_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"google_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"google_cse_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"google_cse_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_engine\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"search_engine\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"siterestrict\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"siterestrict\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GoogleSearchAPIWrapper\"},\"description\":\"Wrapper for Google Search 
API.\",\"base_classes\":[\"GoogleSearchAPIWrapper\"],\"display_name\":\"GoogleSearchAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"GoogleSerperAPIWrapper\":{\"template\":{\"aiosession\":{\"type\":\"ClientSession\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"gl\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"us\",\"fileTypes\":[],\"password\":false,\"name\":\"gl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"hl\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"en\",\"fileTypes\":[],\"password\":false,\"name\":\"hl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"result_key_for_type\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{\\n \\\"news\\\": \\\"news\\\",\\n \\\"places\\\": \\\"places\\\",\\n \\\"images\\\": \\\"images\\\",\\n \\\"search\\\": 
\\\"organic\\\"\\n}\",\"fileTypes\":[],\"password\":true,\"name\":\"result_key_for_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"serper_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"serper_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"tbs\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tbs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"search\",\"fileTypes\":[],\"password\":false,\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"GoogleSerperAPIWrapper\"},\"description\":\"Wrapper around the Serper.dev Google Search API.\",\"base_classes\":[\"GoogleSerperAPIWrapper\"],\"display_name\":\"GoogleSerperAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SearxSearchWrapper\":{\"template\":{\"aiosession\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"categories\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"password\":false,\"name\":\"categories\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"engines\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"password\":false,\"name\":\"engines\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"sh
ow\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"fileTypes\":[],\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"params\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"params\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"query_suffix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":false,\"name\":\"query_suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"searx_host\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":false,\"name\":\"searx_host\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"unsecure\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"unsecure\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SearxSearchWrapper\"},\"description\":\"Wrapper for Searx 
API.\",\"base_classes\":[\"SearxSearchWrapper\"],\"display_name\":\"SearxSearchWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"SerpAPIWrapper\":{\"template\":{\"aiosession\":{\"type\":\"ClientSession\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"params\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"{\\n \\\"engine\\\": \\\"google\\\",\\n \\\"google_domain\\\": \\\"google.com\\\",\\n \\\"gl\\\": \\\"us\\\",\\n \\\"hl\\\": \\\"en\\\"\\n}\",\"fileTypes\":[],\"password\":false,\"name\":\"params\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"search_engine\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"search_engine\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"serpapi_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"SerpAPIWrapper\"},\"description\":\"Wrapper around 
SerpAPI.\",\"base_classes\":[\"SerpAPIWrapper\"],\"display_name\":\"SerpAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"WikipediaAPIWrapper\":{\"template\":{\"doc_content_chars_max\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":4000,\"fileTypes\":[],\"password\":false,\"name\":\"doc_content_chars_max\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"lang\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"en\",\"fileTypes\":[],\"password\":false,\"name\":\"lang\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"load_all_available_meta\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"load_all_available_meta\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"top_k_results\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":3,\"fileTypes\":[],\"password\":false,\"name\":\"top_k_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"wiki_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"wiki_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"WikipediaAPIWrapper\"},\"description\":\"Wrapper around 
WikipediaAPI.\",\"base_classes\":[\"WikipediaAPIWrapper\"],\"display_name\":\"WikipediaAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"WolframAlphaAPIWrapper\":{\"template\":{\"wolfram_alpha_appid\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"wolfram_alpha_appid\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"wolfram_client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"wolfram_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"WolframAlphaAPIWrapper\"},\"description\":\"Wrapper for Wolfram Alpha.\",\"base_classes\":[\"WolframAlphaAPIWrapper\"],\"display_name\":\"WolframAlphaAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"GetRequest\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\nimport requests\\nfrom typing import Optional\\n\\n\\nclass GetRequest(CustomComponent):\\n display_name: str = \\\"GET Request\\\"\\n description: str = \\\"Make a GET request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#get-request\\\"\\n beta: bool = True\\n field_config = {\\n \\\"url\\\": {\\n \\\"display_name\\\": \\\"URL\\\",\\n \\\"info\\\": \\\"The URL to make the request to\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": 
{\\\"show\\\": False},\\n \\\"timeout\\\": {\\n \\\"display_name\\\": \\\"Timeout\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"The timeout to use for the request.\\\",\\n \\\"value\\\": 5,\\n },\\n }\\n\\n def get_document(self, session: requests.Session, url: str, headers: Optional[dict], timeout: int) -> Document:\\n try:\\n response = session.get(url, headers=headers, timeout=int(timeout))\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except requests.Timeout:\\n return Document(\\n page_content=\\\"Request Timed Out\\\",\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 408},\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n url: str,\\n headers: Optional[dict] = None,\\n timeout: int = 5,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n urls = url if isinstance(url, list) else [url]\\n with requests.Session() as session:\\n documents = [self.get_document(session, u, headers, timeout) for u in urls]\\n self.repr_value = documents\\n return documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the 
request.\"},\"timeout\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"The timeout to use for the request.\"},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to\"},\"_type\":\"CustomComponent\"},\"description\":\"Make a GET request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"GET Request\",\"documentation\":\"https://docs.langflow.org/components/utilities#get-request\",\"custom_fields\":{\"headers\":null,\"timeout\":null,\"url\":null},\"output_types\":[\"GetRequest\"],\"field_formatters\":{},\"beta\":true},\"JSONDocumentBuilder\":{\"template\":{\"document\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"### JSON Document Builder\\n\\n# Build a Document containing a JSON object using a key and another Document page content.\\n\\n# **Params**\\n\\n# - **Key:** The key to use for the JSON object.\\n# - **Document:** The Document page to use for the JSON object.\\n\\n# **Output**\\n\\n# - **Document:** The Document containing the JSON object.\\n\\nfrom langchain.schema import Document\\nfrom langflow import CustomComponent\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass JSONDocumentBuilder(CustomComponent):\\n 
display_name: str = \\\"JSON Document Builder\\\"\\n description: str = \\\"Build a Document containing a JSON object using a key and another Document page content.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n beta = True\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#json-document-builder\\\"\\n\\n field_config = {\\n \\\"key\\\": {\\\"display_name\\\": \\\"Key\\\"},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def build(\\n self,\\n key: str,\\n document: Document,\\n ) -> Document:\\n documents = None\\n if isinstance(document, list):\\n documents = [\\n Document(page_content=orjson_dumps({key: doc.page_content}, indent_2=False)) for doc in document\\n ]\\n elif isinstance(document, Document):\\n documents = Document(page_content=orjson_dumps({key: document.page_content}, indent_2=False))\\n else:\\n raise TypeError(f\\\"Expected Document or list of Documents, got {type(document)}\\\")\\n self.repr_value = documents\\n return documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"key\",\"display_name\":\"Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Build a Document containing a JSON object using a key and another Document page content.\",\"base_classes\":[\"Document\"],\"display_name\":\"JSON Document 
Builder\",\"documentation\":\"https://docs.langflow.org/components/utilities#json-document-builder\",\"custom_fields\":{\"document\":null,\"key\":null},\"output_types\":[\"JSONDocumentBuilder\"],\"field_formatters\":{},\"beta\":true},\"UpdateRequest\":{\"template\":{\"document\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\nimport requests\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass UpdateRequest(CustomComponent):\\n display_name: str = \\\"Update Request\\\"\\n description: str = \\\"Make a PATCH request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#update-request\\\"\\n beta: bool = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"field_type\\\": \\\"NestedDict\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n \\\"method\\\": {\\n \\\"display_name\\\": \\\"Method\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"The HTTP method to use.\\\",\\n \\\"options\\\": [\\\"PATCH\\\", \\\"PUT\\\"],\\n \\\"value\\\": \\\"PATCH\\\",\\n },\\n }\\n\\n def update_document(\\n self,\\n session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n method: str 
= \\\"PATCH\\\",\\n ) -> Document:\\n try:\\n if method == \\\"PATCH\\\":\\n response = session.patch(url, headers=headers, data=document.page_content)\\n elif method == \\\"PUT\\\":\\n response = session.put(url, headers=headers, data=document.page_content)\\n else:\\n raise ValueError(f\\\"Unsupported method: {method}\\\")\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n method: str,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> List[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [self.update_document(session, doc, url, headers, method) for doc in documents]\\n self.repr_value = documents\\n return documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"headers\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to 
send with the request.\"},\"method\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"PATCH\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"PATCH\",\"PUT\"],\"name\":\"method\",\"display_name\":\"Method\",\"advanced\":false,\"dynamic\":false,\"info\":\"The HTTP method to use.\"},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\"},\"_type\":\"CustomComponent\"},\"description\":\"Make a PATCH request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"Update Request\",\"documentation\":\"https://docs.langflow.org/components/utilities#update-request\",\"custom_fields\":{\"document\":null,\"headers\":null,\"method\":null,\"url\":null},\"output_types\":[\"UpdateRequest\"],\"field_formatters\":{},\"beta\":true},\"PostRequest\":{\"template\":{\"document\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\nimport requests\\nfrom typing import Optional\\n\\n\\nclass PostRequest(CustomComponent):\\n display_name: str = \\\"POST Request\\\"\\n description: str = \\\"Make a POST request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#post-request\\\"\\n 
beta: bool = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def post_document(\\n self,\\n session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> Document:\\n try:\\n response = session.post(url, headers=headers, data=document.page_content)\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": 500,\\n },\\n )\\n\\n def build(\\n self,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [self.post_document(session, doc, url, headers) for doc in documents]\\n self.repr_value = documents\\n return 
documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the request.\"},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\"},\"_type\":\"CustomComponent\"},\"description\":\"Make a POST request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"POST Request\",\"documentation\":\"https://docs.langflow.org/components/utilities#post-request\",\"custom_fields\":{\"document\":null,\"headers\":null,\"url\":null},\"output_types\":[\"PostRequest\"],\"field_formatters\":{},\"beta\":true}},\"output_parsers\":{\"ResponseSchema\":{\"template\":{\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"string\",\"fileTypes\":[],\"password\":false,\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"ResponseSchema\"},\"description\":\"A schema for a 
response from a structured output parser.\",\"base_classes\":[\"ResponseSchema\"],\"display_name\":\"ResponseSchema\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"StructuredOutputParser\":{\"template\":{\"response_schemas\":{\"type\":\"ResponseSchema\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"response_schemas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"StructuredOutputParser\"},\"description\":\"\",\"base_classes\":[\"BaseLLMOutputParser\",\"BaseOutputParser\",\"StructuredOutputParser\"],\"display_name\":\"StructuredOutputParser\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false}},\"retrievers\":{\"MultiQueryRetriever\":{\"template\":{\"llm\":{\"type\":\"BaseLLM\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"You are an AI language model assistant. Your task is \\n to generate 3 different versions of the given user \\n question to retrieve relevant documents from a vector database. \\n By generating multiple perspectives on the user question, \\n your goal is to help the user overcome some of the limitations \\n of distance-based similarity search. Provide these alternative \\n questions separated by newlines. 
Original question: {question}\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"include_original\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"include_original\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"parser_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"lines\",\"fileTypes\":[],\"password\":false,\"name\":\"parser_key\",\"display_name\":\"Parser Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"MultiQueryRetriever\"},\"description\":\"Initialize from llm using default template.\",\"base_classes\":[\"MultiQueryRetriever\",\"BaseRetriever\"],\"display_name\":\"MultiQueryRetriever\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"beta\":false},\"AmazonKendra\":{\"template\":{\"attribute_filter\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"attribute_filter\",\"display_name\":\"Attribute Filter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.retrievers import AmazonKendraRetriever\\nfrom 
langchain.schema import BaseRetriever\\n\\n\\nclass AmazonKendraRetrieverComponent(CustomComponent):\\n display_name: str = \\\"Amazon Kendra Retriever\\\"\\n description: str = \\\"Retriever that uses the Amazon Kendra API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"index_id\\\": {\\\"display_name\\\": \\\"Index ID\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"Region Name\\\"},\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"attribute_filter\\\": {\\n \\\"display_name\\\": \\\"Attribute Filter\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"top_k\\\": {\\\"display_name\\\": \\\"Top K\\\", \\\"field_type\\\": \\\"int\\\"},\\n \\\"user_context\\\": {\\n \\\"display_name\\\": \\\"User Context\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n index_id: str,\\n top_k: int = 3,\\n region_name: Optional[str] = None,\\n credentials_profile_name: Optional[str] = None,\\n attribute_filter: Optional[dict] = None,\\n user_context: Optional[dict] = None,\\n ) -> BaseRetriever:\\n try:\\n output = AmazonKendraRetriever(\\n index_id=index_id,\\n top_k=top_k,\\n region_name=region_name,\\n credentials_profile_name=credentials_profile_name,\\n attribute_filter=attribute_filter,\\n user_context=user_context,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonKendra API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"index_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_id\",\"display_name\":\"Index ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"region_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"region_name\",\"display_name\":\"Region Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"top_k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":3,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"user_context\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"user_context\",\"display_name\":\"User Context\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Retriever that uses the Amazon Kendra API.\",\"base_classes\":[\"BaseRetriever\"],\"display_name\":\"Amazon Kendra Retriever\",\"documentation\":\"\",\"custom_fields\":{\"attribute_filter\":null,\"credentials_profile_name\":null,\"index_id\":null,\"region_name\":null,\"top_k\":null,\"user_context\":null},\"output_types\":[\"AmazonKendra\"],\"field_formatters\":{},\"beta\":true},\"MetalRetriever\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"client_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"client_id\",\"display_name\":\"Client ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.retrievers import MetalRetriever\\nfrom langchain.schema import BaseRetriever\\nfrom metal_sdk.metal import Metal # type: ignore\\n\\n\\nclass MetalRetrieverComponent(CustomComponent):\\n display_name: str = \\\"Metal Retriever\\\"\\n description: str = \\\"Retriever that uses the Metal API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"password\\\": True},\\n \\\"client_id\\\": {\\\"display_name\\\": \\\"Client ID\\\", \\\"password\\\": True},\\n \\\"index_id\\\": {\\\"display_name\\\": \\\"Index ID\\\"},\\n \\\"params\\\": {\\\"display_name\\\": \\\"Parameters\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, api_key: str, client_id: str, index_id: str, params: Optional[dict] = None) -> BaseRetriever:\\n try:\\n metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id)\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Metal API.\\\") from e\\n return MetalRetriever(client=metal, params=params or {})\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"index_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_id\",\"display_name\":\"Index 
ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"params\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"params\",\"display_name\":\"Parameters\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"description\":\"Retriever that uses the Metal API.\",\"base_classes\":[\"BaseRetriever\"],\"display_name\":\"Metal Retriever\",\"documentation\":\"\",\"custom_fields\":{\"api_key\":null,\"client_id\":null,\"index_id\":null,\"params\":null},\"output_types\":[\"MetalRetriever\"],\"field_formatters\":{},\"beta\":true}},\"custom_components\":{\"CustomComponent\":{\"template\":{\"param\":{\"type\":\"Data\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"param\",\"display_name\":\"Parameter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\"},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\n\\nclass Component(CustomComponent):\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Data) -> Data:\\n return param\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\"},\"_type\":\"CustomComponent\"},\"base_classes\":[\"Data\"],\"display_name\":\"CustomComponent\",\"documentation\":\"http://docs.langflow.org/components/custom\",\"custom_fields\":{\"param\":null},\"output_types\":[\"CustomComponent\"],\"field_formatters\":{},\"beta\":true}}}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - 
"timings": { "send": -1, "wait": -1, "receive": 0.901 } - }, - { - "startedDateTime": "2023-12-11T18:54:58.423Z", - "time": 0.527, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/auto_login", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20; refresh_tkn_lflw=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/flows" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "227" }, - { "name": "content-type", "value": "application/json" }, - { "name": 
"date", "value": "Mon, 11 Dec 2023 18:54:58 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"access_token\":\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20\",\"refresh_token\":null,\"token_type\":\"bearer\"}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.527 } - }, - { - "startedDateTime": "2023-12-11T18:55:08.881Z", - "time": 0.635, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/build/2920dde2-5c24-4fe0-9c06-ef86b5a16a99/status", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20; refresh_tkn_lflw=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/flow/2920dde2-5c24-4fe0-9c06-ef86b5a16a99" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": 
"\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "15" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:55:08 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"built\":false}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.635 } - }, - { - "startedDateTime": "2023-12-11T18:55:08.881Z", - "time": 1.309, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/flows/", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20; refresh_tkn_lflw=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/flow/2920dde2-5c24-4fe0-9c06-ef86b5a16a99" }, - { "name": 
"Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "375696" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:55:08 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "[{\"name\":\"Awesome Euclid\",\"description\":\"Language Models, Mapped and 
Mastered.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":374,\"id\":\"PromptTemplate-m2yFu\",\"type\":\"genericNode\",\"position\":{\"x\":462.6456058272081,\"y\":1033.9314297130313},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"transcription\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"create an image prompt based on the song's lyrics to be used as the album cover of this 
song:\\n\\nlyrics:\\n{transcription}\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"transcription\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"transcription\",\"display_name\":\"transcription\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"PromptTemplate\",\"StringPromptTemplate\",\"BasePromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"transcription\"],\"template\":[\"transcription\",\"question\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-m2yFu\"},\"selected\":false,\"positionAbsolute\":{\"x\":462.6456058272081,\"y\":1033.9314297130313},\"dragging\":false},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-urapv\",\"type\":\"genericNode\",\"position\":{\"x\":189.94856095084924,\"y\":-85.06556385338186},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":
{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false,\"value\":\"\"},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"requ
ired\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktok
en_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-urapv\"},\"selected\":false,\"positionAbsolute\":{\"x\":189.94856095084924,\"y\":-85.06556385338186},\"dragging\":false},{\"width\":384,\"height\":806,\"id\":\"CustomComponent-IEIUl\",\"type\":\"genericNode\",\"position\":{\"x\":2364.865919667005,\"y\":88.43094097025096},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nfrom platformdirs import user_cache_dir\\nimport base64\\nfrom PIL import Image\\nfrom io import BytesIO\\nfrom openai import OpenAI\\nfrom pathlib import Path\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Image generator\\\"\\n description: str = \\\"generate images using Dall-E\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\",\\\"input_types\\\":[\\\"str\\\"]},\\n \\\"api_key\\\":{\\\"display_name\\\":\\\"OpenAI API key\\\",\\\"password\\\":True},\\n \\\"model\\\":{\\\"display_name\\\":\\\"Model name\\\",\\\"advanced\\\":True,\\\"options\\\":[\\\"dall-e-2\\\",\\\"dall-e-3\\\"], 
\\\"value\\\":\\\"dall-e-3\\\"},\\n \\\"file_name\\\":{\\\"display_name\\\":\\\"File Name\\\"},\\n \\\"output_format\\\":{\\\"display_name\\\":\\\"Output format\\\",\\\"options\\\":[\\\"jpeg\\\",\\\"png\\\"],\\\"value\\\":\\\"jpeg\\\"},\\n \\\"width\\\":{\\\"display_name\\\":\\\"Width\\\" ,\\\"value\\\":1024},\\n \\\"height\\\":{\\\"display_name\\\":\\\"Height\\\", \\\"value\\\":1024}\\n }\\n\\n def build(self, prompt:str,api_key:str,model:str,file_name:str,output_format:str,width:int,height:int):\\n client = OpenAI(api_key=api_key)\\n cache_dir = Path(user_cache_dir(\\\"langflow\\\"))\\n images_dir = cache_dir / \\\"images\\\"\\n images_dir.mkdir(parents=True, exist_ok=True)\\n image_path = images_dir / f\\\"{file_name}.{output_format}\\\"\\n response = client.images.generate(\\n model=model,\\n prompt=prompt,\\n size=f\\\"{height}x{width}\\\",\\n response_format=\\\"b64_json\\\",\\n n=1,\\n )\\n # Decode base64-encoded image string\\n binary_data = base64.b64decode(response.data[0].b64_json)\\n # Create PIL Image object from binary image data\\n image_pil = Image.open(BytesIO(binary_data))\\n image_pil.save(image_path, format=output_format.upper())\\n return \\\"\\\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_key\",\"display_name\":\"OpenAI API key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"file_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"file_name\",\"display_name\":\"File 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"album\"},\"height\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1024,\"password\":false,\"name\":\"height\",\"display_name\":\"Height\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"dall-e-3\",\"password\":false,\"options\":[\"dall-e-2\",\"dall-e-3\"],\"name\":\"model\",\"display_name\":\"Model name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"output_format\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"jpeg\",\"password\":false,\"options\":[\"jpeg\",\"png\"],\"name\":\"output_format\",\"display_name\":\"Output format\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"input_types\":[\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"width\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1024,\"password\":false,\"name\":\"width\",\"display_name\":\"Width\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false}},\"description\":\"generate images using Dall-E\",\"base_classes\":[\"Data\"],\"display_name\":\"Image 
generator\",\"custom_fields\":{\"api_key\":null,\"file_name\":null,\"height\":null,\"model\":null,\"output_format\":null,\"prompt\":null,\"width\":null},\"output_types\":[\"Data\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-IEIUl\"},\"selected\":false,\"positionAbsolute\":{\"x\":2364.865919667005,\"y\":88.43094097025096}},{\"width\":384,\"height\":338,\"id\":\"LLMChain-KlJb3\",\"type\":\"genericNode\",\"position\":{\"x\":1242.9851164540805,\"y\":-139.74634696823108},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-KlJb3\"},\"selected\":false,\"positionAbsolute\":{\"x\":1242.9851164540805,\"y\":-139.74634696823108},\"dragging\":false},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-bCuc0\",\"type\":\"genericNode\",\"position\":{\"x\":1747.8107777342625,\"y\":319.13729017446667},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Custom Component\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return 
{\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Chain) -> str:\\n result = param.run({})\\n self.status = result\\n return result\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"param\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"param\",\"display_name\":\"Parameter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Chain\",\"list\":false}},\"description\":\"Create any custom component you want!\",\"base_classes\":[\"str\"],\"display_name\":\"Custom Component\",\"custom_fields\":{\"param\":null},\"output_types\":[\"str\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-bCuc0\"},\"selected\":false,\"positionAbsolute\":{\"x\":1747.8107777342625,\"y\":319.13729017446667}}],\"edges\":[{\"source\":\"LLMChain-KlJb3\",\"target\":\"CustomComponent-bCuc0\",\"sourceHandle\":\"{œbaseClassesœ:[œChainœ,œCallableœ],œdataTypeœ:œLLMChainœ,œidœ:œLLMChain-KlJb3œ}\",\"targetHandle\":\"{œfieldNameœ:œparamœ,œidœ:œCustomComponent-bCuc0œ,œinputTypesœ:null,œtypeœ:œChainœ}\",\"id\":\"reactflow__edge-LLMChain-KlJb3{œbaseClassesœ:[œChainœ,œCallableœ],œdataTypeœ:œLLMChainœ,œidœ:œLLMChain-KlJb3œ}-CustomComponent-bCuc0{œfieldNameœ:œparamœ,œidœ:œCustomComponent-bCuc0œ,œinputTypesœ:null,œtypeœ:œChainœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"param\",\"id\":\"CustomComponent-bCuc0\",\"inputTypes\":null,\"type\":\"Chain\"},\"sourceHandle\":{\"baseClasses\":[\"Chain\",\"Callable\"],\"dataType\":\"LLMChain\",\"id\":\"LLMChain-KlJb3\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"ChatOpenAI-urapv\",\"target\":\"LLMChain-KlJb3\",\"sourceHandle\":\"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-urapvœ}\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"id\":\"reactflow__edge-ChatOpenAI-urapv{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-urapvœ}-LLMChain-KlJb3{œfieldNameœ:œllmœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-KlJb3\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-urapv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"PromptTemplate-m2yFu\",\"target\":\"LLMChain-KlJb3\",\"sourceHandle\":\"{œbaseClassesœ:[œPromptTemplateœ,œStringPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-m2yFuœ}\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"id\":\"reactflow__edge-PromptTemplate-m2yFu{œbaseClassesœ:[œPromptTemplateœ,œStringPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-m2yFuœ}-LLMChain-KlJb3{œfieldNameœ:œpromptœ,œidœ:œLLMChain-KlJb3œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-KlJb3\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"PromptTemplate\",\"StringPromptTemplate\",\"BasePromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-m2yFu\"}},\"style\":{\"stroke\
":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"CustomComponent-bCuc0\",\"target\":\"CustomComponent-IEIUl\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-bCuc0œ}\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œCustomComponent-IEIUlœ,œinputTypesœ:[œstrœ],œtypeœ:œstrœ}\",\"id\":\"reactflow__edge-CustomComponent-bCuc0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-bCuc0œ}-CustomComponent-IEIUl{œfieldNameœ:œpromptœ,œidœ:œCustomComponent-IEIUlœ,œinputTypesœ:[œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"CustomComponent-IEIUl\",\"inputTypes\":[\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-bCuc0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false}],\"viewport\":{\"x\":92.23454077990459,\"y\":183.8125619056221,\"zoom\":0.6070974421975234}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:33:18.503954\",\"folder\":null,\"id\":\"fe142ce5-32dc-4955-b186-672ced662f13\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Darwin\",\"description\":\"Conversation Catalyst 
Engine.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":626,\"id\":\"OpenAI-3ZVDh\",\"type\":\"genericNode\",\"position\":{\"x\":-4.0041891741949485,\"y\":-114.02615182719649},\"data\":{\"type\":\"OpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":20,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"best_of\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic
\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"logit_bias\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\"
:false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-davinci-003\",\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language 
models.\",\"base_classes\":[\"OpenAI\",\"BaseLLM\",\"BaseOpenAI\",\"BaseLanguageModel\"],\"display_name\":\"OpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAI-3ZVDh\"},\"selected\":true,\"positionAbsolute\":{\"x\":-4.0041891741949485,\"y\":-114.02615182719649},\"dragging\":false},{\"width\":384,\"height\":338,\"id\":\"LLMChain-RFYXY\",\"type\":\"genericNode\",\"position\":{\"x\":586.672100458868,\"y\":10.967049167706678},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-RFYXY\"},\"positionAbsolute\":{\"x\":586.672100458868,\"y\":10.967049167706678}},{\"width\":384,\"height\":242,\"id\":\"ChatPromptTemplate-ce1sg\",\"type\":\"genericNode\",\"position\":{\"x\":395.598984452791,\"y\":612.188491773035},\"data\":{\"type\":\"ChatPromptTemplate\",\"node\":{\"template\":{\"messages\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"messages\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseMessagePromptTemplate\",\"list\":true},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"sho
w\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatPromptTemplate\"},\"description\":\"A prompt template for chat models.\",\"base_classes\":[\"ChatPromptTemplate\",\"BaseChatPromptTemplate\",\"BasePromptTemplate\"],\"display_name\":\"ChatPromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null},\"id\":\"ChatPromptTemplate-ce1sg\"},\"selected\":false,\"positionAbsolute\":{\"x\":395.598984452791,\"y\":612.188491773035},\"dragging\":false}],\"edges\":[{\"source\":\"OpenAI-3ZVDh\",\"sourceHandle\":\"{œbaseClassesœ:[œOpenAIœ,œBaseLLMœ,œBaseOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œOpenAIœ,œidœ:œOpenAI-3ZVDhœ}\",\"target\":\"LLMChain-RFYXY\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-RFYXY\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"OpenAI\",\"BaseLLM\",\"BaseOpenAI\",\"BaseLanguageModel\"],\"dataType\":\"OpenAI\",\"id\":\"OpenAI-3ZVDh\"}},\"style\":{\"st
roke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-OpenAI-3ZVDh{œbaseClassesœ:[œOpenAIœ,œBaseLLMœ,œBaseOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œOpenAIœ,œidœ:œOpenAI-3ZVDhœ}-LLMChain-RFYXY{œfieldNameœ:œllmœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\"},{\"source\":\"ChatPromptTemplate-ce1sg\",\"sourceHandle\":\"{œbaseClassesœ:[œChatPromptTemplateœ,œBaseChatPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œChatPromptTemplateœ,œidœ:œChatPromptTemplate-ce1sgœ}\",\"target\":\"LLMChain-RFYXY\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-RFYXY\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"ChatPromptTemplate\",\"BaseChatPromptTemplate\",\"BasePromptTemplate\"],\"dataType\":\"ChatPromptTemplate\",\"id\":\"ChatPromptTemplate-ce1sg\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatPromptTemplate-ce1sg{œbaseClassesœ:[œChatPromptTemplateœ,œBaseChatPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œChatPromptTemplateœ,œidœ:œChatPromptTemplate-ce1sgœ}-LLMChain-RFYXY{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RFYXYœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":8.832868402772647,\"y\":189.85443326477025,\"zoom\":0.6070974421975235}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:50:19.584160\",\"folder\":null,\"id\":\"103766f0-1f50-427a-9ba7-2ab73343c524\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Perky Easley\",\"description\":\"Your Toolkit for Text 
Generation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-VPh47\",\"type\":\"genericNode\",\"position\":{\"x\":-328.5742193020408,\"y\":-420.1025929438987},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":fals
e,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"mul
tiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"BaseChatModel\",\"ChatOpenAI\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-VPh47\"},\"selected\":false,\"positionAbsolute\":{\"x\":-328.5742193020408,\"y\":-420.1025929438987},\"dragging\":false},{\"width\":384,\"height\":338,\"id\":\"LLMChain-NgFyo\",\"type\":\"genericNode\",\"position\":{\"x\":225.3113389084088,\"y\":-193.3520019494289},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> 
Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-NgFyo\"},\"selected\":false,\"positionAbsolute\":{\"x\":225.3113389084088,\"y\":-193.3520019494289},\"dragging\":false},{\"width\":384,\"height\":374,\"id\":\"PromptTemplate-oAFjh\",\"type\":\"genericNode\",\"position\":{\"x\":-342.62522294052764,\"y\":391.20629510686103},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"typ
e\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"links\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Answer everything with links\\n{links}\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"links\",\"display_name\":\"links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"BasePromptTemplate\",\"StringPromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"links\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-oAFjh\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-342.62522294052764,\"y\":391.20629510686103}}],\"edges\":[{\"source\":\"ChatOpenAI-VPh47\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VPh47œ}\",\"target\":\"LLMChain-NgFyo\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-NgFyo\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseChatModel\",\"ChatOpenAI\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-VPh47\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-VPh47{œbaseClassesœ:[œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VPh47œ}-LLMChain-NgFyo{œfieldNameœ:œllmœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\"},{\"source\":\"PromptTemplate-oAFjh\",\"sourceHandle\":\"{œbaseClassesœ:[œBasePromptTemplateœ,œStringPromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-oAFjhœ}\",\"target\":\"LLMChain-NgFyo\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-NgFyo\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"BasePromptTemplate\",\"StringPromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-oAFjh\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-PromptTemplate-oAFjh{œbaseClassesœ:[œBasePromptTemplateœ,œStringPromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-oAFjhœ}-LLMChain-NgFyo{œfieldNameœ:œpromptœ,œidœ:œLLMChain-NgFyoœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":338.6546182690814,\"y\":53.026340800283265,\"zoom\":0.7169776240079143}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:53:25.148460\",\"folder\":null,\"id\":\"a794fc48-5e9b-42a3-924f-7fe610500035\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"(D) Basic Chat (1) (4)\",\"description\":\"Simplest possible chat 
model\",\"data\":{\"nodes\":[{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-fBcfh\",\"type\":\"genericNode\",\"position\":{\"x\":228.87326389541306,\"y\":465.8628482073749},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-
16k-0613\",\"gpt-3.5-turbo-16k\",\"gpt-4-0613\",\"gpt-4-32k-0613\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false,\"value\":60},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseChatModel\",\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\"},\"id\":\"ChatOpenAI-fBcfh\",\"value\":null},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":228.87326389541306,\"y\":465.8628482073749}},{\"width\":384,\"height\":310,\"id\":\"ConversationChain-bVNex\",\"type\":\"genericNode\",\"position\":{\"x\":806,\"y\":554},\"data\":{\"type\":\"ConversationChain\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"_type\":\"default\"},\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLMOutputParser\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"history\",\"input\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. 
If the AI does not know the answer to a question, it truthfully says it does not know.\\n\\nCurrent conversation:\\n{history}\\nHuman: {input}\\nAI:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"input\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"llm_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"response\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_final_only\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_final_only\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationChain\"},\"description\":\"Chain 
to have a conversation and load context from memory.\",\"base_classes\":[\"ConversationChain\",\"Chain\",\"LLMChain\",\"function\"],\"display_name\":\"ConversationChain\",\"documentation\":\"\"},\"id\":\"ConversationChain-bVNex\",\"value\":null},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":806,\"y\":554}}],\"edges\":[{\"source\":\"ChatOpenAI-fBcfh\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseChatModelœ,œBaseLanguageModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-fBcfhœ}\",\"target\":\"ConversationChain-bVNex\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œConversationChain-bVNexœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"className\":\"stroke-gray-900 stroke-connection\",\"id\":\"reactflow__edge-ChatOpenAI-fBcfh{œbaseClassesœ:[œBaseChatModelœ,œBaseLanguageModelœ,œChatOpenAIœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-fBcfhœ}-ConversationChain-bVNex{œfieldNameœ:œllmœ,œidœ:œConversationChain-bVNexœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"BaseChatModel\",\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-fBcfh\"},\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"ConversationChain-bVNex\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"}},\"style\":{\"stroke\":\"#555\"},\"animated\":false}],\"viewport\":{\"x\":-118.21416568593895,\"y\":-240.64815770363373,\"zoom\":0.7642485855675408}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:57:55.879806\",\"folder\":null,\"id\":\"bddebeea-b80a-4b28-8895-c4425687dcce\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Directory Loader\",\"description\":\"Generic File Loader\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"import os\\n\\nfrom langchain.schema import Document\\nfrom langflow 
import CustomComponent\\nimport glob\\n\\nclass DirectoryLoaderComponent(CustomComponent):\\n display_name: str = \\\"Directory Loader\\\"\\n description: str = \\\"Generic File Loader\\\"\\n beta = True\\n loaders_info = [\\n {\\n \\\"loader\\\": \\\"AirbyteJSONLoader\\\",\\n \\\"name\\\": \\\"Airbyte JSON (.jsonl)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.AirbyteJSONLoader\\\",\\n \\\"defaultFor\\\": [\\\"jsonl\\\"],\\n \\\"allowdTypes\\\": [\\\"jsonl\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"JSONLoader\\\",\\n \\\"name\\\": \\\"JSON (.json)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.JSONLoader\\\",\\n \\\"defaultFor\\\": [\\\"json\\\"],\\n \\\"allowdTypes\\\": [\\\"json\\\"],\\n \\\"kwargs\\\": {\\\"jq_schema\\\": \\\".\\\", \\\"text_content\\\": False}\\n },\\n {\\n \\\"loader\\\": \\\"BSHTMLLoader\\\",\\n \\\"name\\\": \\\"BeautifulSoup4 HTML (.html, .htm)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.BSHTMLLoader\\\",\\n \\\"allowdTypes\\\": [\\\"html\\\", \\\"htm\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"CSVLoader\\\",\\n \\\"name\\\": \\\"CSV (.csv)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.CSVLoader\\\",\\n \\\"defaultFor\\\": [\\\"csv\\\"],\\n \\\"allowdTypes\\\": [\\\"csv\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"CoNLLULoader\\\",\\n \\\"name\\\": \\\"CoNLL-U (.conllu)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.CoNLLULoader\\\",\\n \\\"defaultFor\\\": [\\\"conllu\\\"],\\n \\\"allowdTypes\\\": [\\\"conllu\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"EverNoteLoader\\\",\\n \\\"name\\\": \\\"EverNote (.enex)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.EverNoteLoader\\\",\\n \\\"defaultFor\\\": [\\\"enex\\\"],\\n \\\"allowdTypes\\\": [\\\"enex\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"FacebookChatLoader\\\",\\n \\\"name\\\": \\\"Facebook Chat (.json)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.FacebookChatLoader\\\",\\n \\\"allowdTypes\\\": [\\\"json\\\"],\\n },\\n {\\n \\\"loader\\\": 
\\\"OutlookMessageLoader\\\",\\n \\\"name\\\": \\\"Outlook Message (.msg)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.OutlookMessageLoader\\\",\\n \\\"defaultFor\\\": [\\\"msg\\\"],\\n \\\"allowdTypes\\\": [\\\"msg\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"PyPDFLoader\\\",\\n \\\"name\\\": \\\"PyPDF (.pdf)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.PyPDFLoader\\\",\\n \\\"defaultFor\\\": [\\\"pdf\\\"],\\n \\\"allowdTypes\\\": [\\\"pdf\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"STRLoader\\\",\\n \\\"name\\\": \\\"Subtitle (.str)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.STRLoader\\\",\\n \\\"defaultFor\\\": [\\\"str\\\"],\\n \\\"allowdTypes\\\": [\\\"str\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"TextLoader\\\",\\n \\\"name\\\": \\\"Text (.txt)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.TextLoader\\\",\\n \\\"defaultFor\\\": [\\\"txt\\\"],\\n \\\"allowdTypes\\\": [\\\"txt\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredEmailLoader\\\",\\n \\\"name\\\": \\\"Unstructured Email (.eml)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredEmailLoader\\\",\\n \\\"defaultFor\\\": [\\\"eml\\\"],\\n \\\"allowdTypes\\\": [\\\"eml\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredHTMLLoader\\\",\\n \\\"name\\\": \\\"Unstructured HTML (.html, .htm)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredHTMLLoader\\\",\\n \\\"defaultFor\\\": [\\\"html\\\", \\\"htm\\\"],\\n \\\"allowdTypes\\\": [\\\"html\\\", \\\"htm\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredMarkdownLoader\\\",\\n \\\"name\\\": \\\"Unstructured Markdown (.md)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredMarkdownLoader\\\",\\n \\\"defaultFor\\\": [\\\"md\\\"],\\n \\\"allowdTypes\\\": [\\\"md\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredPowerPointLoader\\\",\\n \\\"name\\\": \\\"Unstructured PowerPoint (.pptx)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredPowerPointLoader\\\",\\n 
\\\"defaultFor\\\": [\\\"pptx\\\"],\\n \\\"allowdTypes\\\": [\\\"pptx\\\"],\\n },\\n {\\n \\\"loader\\\": \\\"UnstructuredWordLoader\\\",\\n \\\"name\\\": \\\"Unstructured Word (.docx)\\\",\\n \\\"import\\\": \\\"langchain.document_loaders.UnstructuredWordLoader\\\",\\n \\\"defaultFor\\\": [\\\"docx\\\"],\\n \\\"allowdTypes\\\": [\\\"docx\\\"],\\n },\\n]\\n\\n\\n def build_config(self):\\n loader_options = [\\\"Automatic\\\"] + [\\n loader_info[\\\"name\\\"] for loader_info in self.loaders_info\\n ]\\n\\n file_types = []\\n suffixes = []\\n\\n for loader_info in self.loaders_info:\\n if \\\"allowedTypes\\\" in loader_info:\\n file_types.extend(loader_info[\\\"allowedTypes\\\"])\\n suffixes.extend([f\\\".{ext}\\\" for ext in loader_info[\\\"allowedTypes\\\"]])\\n\\n return {\\n \\\"directory_path\\\": {\\n \\\"display_name\\\": \\\"Directory Path\\\",\\n \\\"required\\\": True,\\n },\\n \\\"loader\\\": {\\n \\\"display_name\\\": \\\"Loader\\\",\\n \\\"is_list\\\": True,\\n \\\"required\\\": True,\\n \\\"options\\\": loader_options,\\n \\\"value\\\": \\\"Automatic\\\",\\n },\\n }\\n\\n def build(self, directory_path: str, loader: str) -> Document:\\n # Verifique se o diretório existe\\n if not os.path.exists(directory_path):\\n raise ValueError(f\\\"Directory not found: {directory_path}\\\")\\n\\n files = glob.glob(directory_path + \\\"/*.*\\\")\\n\\n\\n loader_info = None\\n if loader == \\\"Automatic\\\":\\n for file in files:\\n file_type = file.split(\\\".\\\")[-1]\\n\\n\\n for info in self.loaders_info:\\n if \\\"defaultFor\\\" in info:\\n if file_type in info[\\\"defaultFor\\\"]:\\n loader_info = info\\n break\\n if loader_info:\\n break\\n\\n if not loader_info:\\n raise ValueError(\\n \\\"No default loader found for any file in the directory\\\"\\n )\\n\\n else:\\n for info in self.loaders_info:\\n if info[\\\"name\\\"] == loader:\\n loader_info = info\\n break\\n\\n if not loader_info:\\n raise ValueError(f\\\"Loader {loader} not found in the loader info 
list\\\")\\n\\n loader_import = loader_info[\\\"import\\\"]\\n module_name, class_name = loader_import.rsplit(\\\".\\\", 1)\\n\\n try:\\n # Importe o loader dinamicamente\\n loader_module = __import__(module_name, fromlist=[class_name])\\n loader_instance = getattr(loader_module, class_name)\\n except ImportError as e:\\n raise ValueError(\\n f\\\"Loader {loader} could not be imported\\\\nLoader info:\\\\n{loader_info}\\\"\\n ) from e\\n\\n results = []\\n for file in files:\\n file_path = os.path.join(directory_path, file)\\n kwargs = loader_info.get(\\\"kwargs\\\", {})\\n result = loader_instance(file_path=file_path, **kwargs).load()\\n results.append(result)\\n self.status = results\\n return results\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"directory_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"directory_path\",\"display_name\":\"Directory Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"/Users/ogabrielluiz/Projects/langflow2/docs\"},\"loader\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"Automatic\",\"password\":false,\"options\":[\"Automatic\",\"Airbyte JSON (.jsonl)\",\"JSON (.json)\",\"BeautifulSoup4 HTML (.html, .htm)\",\"CSV (.csv)\",\"CoNLL-U (.conllu)\",\"EverNote (.enex)\",\"Facebook Chat (.json)\",\"Outlook Message (.msg)\",\"PyPDF (.pdf)\",\"Subtitle (.str)\",\"Text (.txt)\",\"Unstructured Email (.eml)\",\"Unstructured HTML (.html, .htm)\",\"Unstructured Markdown (.md)\",\"Unstructured PowerPoint (.pptx)\",\"Unstructured Word (.docx)\"],\"name\":\"loader\",\"display_name\":\"Loader\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true}},\"description\":\"Generic File Loader\",\"base_classes\":[\"Document\"],\"display_name\":\"Directory 
Loader\",\"custom_fields\":{\"directory_path\":null,\"loader\":null},\"output_types\":[\"Document\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-Ip6tG\"},\"id\":\"CustomComponent-Ip6tG\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-04T22:58:38.570303\",\"folder\":null,\"id\":\"5fe4debc-b6a7-45d4-a694-c02d8aa93b08\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Whisper Transcriber\",\"description\":\"Converts audio to text using OpenAI's Whisper.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Optional, List, Dict, Union\\nfrom langflow.field_typing import (\\n AgentExecutor,\\n BaseChatMemory,\\n BaseLanguageModel,\\n BaseLLM,\\n BaseLoader,\\n BaseMemory,\\n BaseOutputParser,\\n BasePromptTemplate,\\n BaseRetriever,\\n Callable,\\n Chain,\\n ChatPromptTemplate,\\n Data,\\n Document,\\n Embeddings,\\n NestedDict,\\n Object,\\n PromptTemplate,\\n TextSplitter,\\n Tool,\\n VectorStore,\\n)\\n\\nfrom openai import OpenAI\\nclass Component(CustomComponent):\\n display_name: str = \\\"Whisper Transcriber\\\"\\n description: str = \\\"Converts audio to text using OpenAI's Whisper.\\\"\\n\\n def build_config(self):\\n return {\\\"audio\\\":{\\\"field_type\\\":\\\"file\\\",\\\"suffixes\\\":[\\\".mp3\\\", \\\".mp4\\\", \\\".m4a\\\"]},\\\"OpenAIKey\\\":{\\\"field_type\\\":\\\"str\\\",\\\"password\\\":True}}\\n\\n def build(self, audio:str, OpenAIKey:str) -> str:\\n \\n # TODO: if output path, persist & load it instead\\n \\n client = OpenAI(api_key=OpenAIKey)\\n \\n audio_file= open(audio, \\\"rb\\\")\\n transcript = client.audio.transcriptions.create(\\n model=\\\"whisper-1\\\", \\n 
file=audio_file,\\n response_format=\\\"text\\\"\\n )\\n \\n \\n self.status = transcript\\n return transcript\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"OpenAIKey\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"OpenAIKey\",\"display_name\":\"OpenAIKey\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"audio\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"suffixes\":[\".mp3\",\".mp4\",\".m4a\"],\"password\":false,\"name\":\"audio\",\"display_name\":\"audio\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"file_path\":\"/Users/rodrigonader/Library/Caches/langflow/19b3e1c9-90bf-405f-898a-e982f47adf76/a3308ce7e9b10088fcd985aabb6d17b012c6b6e81ff8806356574474c9d10229.m4a\",\"value\":\"Audio Recording 2023-11-29 at 12.12.04.m4a\"}},\"description\":\"Converts audio to text using OpenAI's Whisper.\",\"base_classes\":[\"str\"],\"display_name\":\"Whisper Transcriber\",\"custom_fields\":{\"OpenAIKey\":null,\"audio\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-GJRrs\"},\"id\":\"CustomComponent-GJRrs\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-04T22:58:39.710502\",\"folder\":null,\"id\":\"bd9e911d-46bd-429f-aa75-dd740430695e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Youtube Conversation\",\"description\":\"Craft Meaningful Interactions, Generate 
Value.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":589,\"id\":\"CustomComponent-h0NSI\",\"type\":\"genericNode\",\"position\":{\"x\":714.9531293402755,\"y\":0.4994374160582993},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"import requests\\nfrom langflow import CustomComponent\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.schema import Document\\nfrom langchain.document_loaders import YoutubeLoader\\n\\n# Example usage:\\n\\nclass YourComponent(CustomComponent):\\n display_name: str = \\\"YouTube Transcript\\\"\\n description: str = \\\"YouTube transcripts refer to the written text versions of the spoken content in a video uploaded to the YouTube platform.\\\"\\n\\n def build_config(self):\\n return {\\\"url\\\": {\\\"multiline\\\": True, \\\"required\\\": True}}\\n\\n def build(self, url: str, llm: BaseLLM, dependencies: Document, language: str = \\\"en\\\") -> Document:\\n dependencies\\n response = requests.get(url)\\n loader = YoutubeLoader.from_youtube_url(url, add_video_info=True, language=[language])\\n result = loader.load()\\n self.repr_value = str(result)\\n return 
result\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"dependencies\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"dependencies\",\"display_name\":\"dependencies\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"language\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"en\",\"password\":false,\"name\":\"language\",\"display_name\":\"language\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"YouTube transcripts refer to the written text versions of the spoken content in a video uploaded to the YouTube platform.\",\"base_classes\":[\"Document\"],\"display_name\":\"YouTube 
Transcript\",\"custom_fields\":{\"dependencies\":null,\"language\":null,\"llm\":null,\"url\":null},\"output_types\":[\"Document\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-h0NSI\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":714.9531293402755,\"y\":0.4994374160582993}},{\"width\":384,\"height\":629,\"id\":\"ChatOpenAI-q61Y2\",\"type\":\"genericNode\",\"position\":{\"x\":18,\"y\":625.5521045590924},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false,\"value\":\"\"},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_k
wargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo-16k\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k-0613\",\"gpt-3.5-turbo-16k\",\"gpt-4-0613\",\"gpt-4-32k-0613\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-q61Y2\"},\"selected\":false,\"positionAbsolute\":{\"x\":18,\"y\":625.5521045590924},\"dragging\":false},{\"width\":384,\"height\":539,\"id\":\"Chroma-gVhy7\",\"type\":\"genericNode\",\"position\":{\"x\":2110.365967257855,\"y\":805.0149521868784},\"data\":{\"type\":\"Chroma\",\"node\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.Client\",\"list\":false},\"client_settings\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client_settings\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.config.Setting\",\"list\":true},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"chroma_server_cors_allow_origins\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Chroma Server CORS Allow 
Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"chroma_server_grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Chroma Server GRPC Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Chroma Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_http_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_http_port\",\"display_name\":\"Chroma Server HTTP Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_ssl_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Chroma Server SSL 
Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"collection_metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"collection_metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"persist\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"persist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"persist_directory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"persist_directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"_type\":\"Chroma\"},\"description\":\"Create a Chroma vectorstore from a raw 
documents.\",\"base_classes\":[\"VectorStore\",\"Chroma\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Chroma\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma\",\"beta\":false,\"error\":null},\"id\":\"Chroma-gVhy7\"},\"selected\":false,\"positionAbsolute\":{\"x\":2110.365967257855,\"y\":805.0149521868784},\"dragging\":false},{\"width\":384,\"height\":501,\"id\":\"RecursiveCharacterTextSplitter-1eaOX\",\"type\":\"genericNode\",\"position\":{\"x\":1409.2872773444874,\"y\":19.45456141103284},\"data\":{\"type\":\"RecursiveCharacterTextSplitter\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.utils.util import build_loader_repr_from_documents\\n\\n\\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Recursive Character Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": 'The characters to split on.\\\\nIf left empty defaults to [\\\"\\\\\\\\n\\\\\\\\n\\\", \\\"\\\\\\\\n\\\", \\\" \\\", \\\"\\\"].',\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n 
\\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n separators: Optional[list[str]] = None,\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n if separators == \\\"\\\":\\n separators = None\\n elif separators:\\n # check if the separators list has escaped characters\\n # if there are escaped characters, unescape them\\n separators = [x.encode().decode(\\\"unicode-escape\\\") for x in separators]\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n splitter = RecursiveCharacterTextSplitter(\\n separators=separators,\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n self.repr_value = build_loader_repr_from_documents(docs)\\n return docs\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"chunk_overlap\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"50\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between 
chunks.\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"400\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\",\"type\":\"int\",\"list\":false},\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\",\"type\":\"Document\",\"list\":true},\"separators\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"separators\",\"display_name\":\"Separators\",\"advanced\":false,\"dynamic\":false,\"info\":\"The characters to split on.\\nIf left empty defaults to [\\\"\\\\n\\\\n\\\", \\\"\\\\n\\\", \\\" \\\", \\\"\\\"].\",\"type\":\"str\",\"list\":true,\"value\":[\" \"]}},\"description\":\"Split text into chunks of a specified length.\",\"base_classes\":[\"Document\"],\"display_name\":\"Recursive Character Text 
Splitter\",\"custom_fields\":{\"chunk_overlap\":null,\"chunk_size\":null,\"documents\":null,\"separators\":null},\"output_types\":[\"RecursiveCharacterTextSplitter\"],\"documentation\":\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\",\"beta\":true,\"error\":null},\"id\":\"RecursiveCharacterTextSplitter-1eaOX\"},\"selected\":false,\"positionAbsolute\":{\"x\":1409.2872773444874,\"y\":19.45456141103284},\"dragging\":false},{\"width\":384,\"height\":367,\"id\":\"OpenAIEmbeddings-cduOO\",\"type\":\"genericNode\",\"position\":{\"x\":1405.1176670984544,\"y\":1029.459061269321},\"data\":{\"type\":\"OpenAIEmbeddings\",\"node\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"passwor
d\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{'Authorization': 'Bearer '}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API 
Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"Embeddings\",\"OpenAIEmbeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAIEmbeddings-cduOO\"},\"selected\":false,\"positionAbsolute\":{\"x\":1405.1176670984544,\"y\":1029.459061269321}},{\"width\":384,\"height\":339,\"id\":\"RetrievalQA-4PmTB\",\"type\":\"genericNode\",\"position\":{\"x\":2711.7786966415715,\"y\":709.4450828605463},\"data\":{\"type\":\"RetrievalQA\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"combine_documents_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCombineDocumentsChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"query\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"
type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"result\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"RetrievalQA\"},\"description\":\"Chain for question-answering against an 
index.\",\"base_classes\":[\"RetrievalQA\",\"Chain\",\"BaseRetrievalQA\",\"function\"],\"display_name\":\"RetrievalQA\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/vector_db_qa\",\"beta\":false,\"error\":null},\"id\":\"RetrievalQA-4PmTB\"},\"selected\":false,\"positionAbsolute\":{\"x\":2711.7786966415715,\"y\":709.4450828605463}},{\"width\":384,\"height\":333,\"id\":\"CombineDocsChain-yk0JF\",\"type\":\"genericNode\",\"position\":{\"x\":2125.6202053356537,\"y\":199.07708924409633},\"data\":{\"type\":\"CombineDocsChain\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"chain_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"load_qa_chain\"},\"description\":\"Load question answering chain.\",\"base_classes\":[\"BaseCombineDocumentsChain\",\"function\"],\"display_name\":\"CombineDocsChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"id\":\"CombineDocsChain-yk0JF\"},\"selected\":false,\"positionAbsolute\":{\"x\":2125.6202053356537,\"y\":199.07708924409633},\"dragging\":false},{\"width\":384,\"height\":417,\"id\":\"CustomComponent-y6Wg0\",\"type\":\"genericNode\",\"position\":{\"x\":39.400034102832365,\"y\":-499.03551482195707},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nimport subprocess\\n\\nfrom 
langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain.prompts import PromptTemplate\\nfrom langchain.schema import Document\\n\\nfrom typing import List\\n\\n\\nclass YourComponent(CustomComponent):\\n display_name: str = \\\"PIP Install\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n\\n def build(self, libs: List[str]) -> Document:\\n def install_package(package_name):\\n subprocess.check_call([\\\"pip\\\", \\\"install\\\", package_name])\\n for lib in libs:\\n install_package(lib)\\n return \\\"\\\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"libs\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"libs\",\"display_name\":\"libs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"requests\",\"pytube\"]}},\"description\":\"Create any custom component you want!\",\"base_classes\":[\"Document\"],\"display_name\":\"PIP Install\",\"custom_fields\":{\"libs\":null},\"output_types\":[],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-y6Wg0\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":39.400034102832365,\"y\":-499.03551482195707}}],\"edges\":[{\"source\":\"ChatOpenAI-q61Y2\",\"sourceHandle\":\"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}\",\"target\":\"CustomComponent-h0NSI\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œBaseLLMœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-q61Y2{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}-CustomComponent-h0NSI{œfieldNameœ:œllmœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œBaseLLMœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-q61Y2\"},\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"CustomComponent-h0NSI\",\"inputTypes\":null,\"type\":\"BaseLLM\"}}},{\"source\":\"CustomComponent-y6Wg0\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-y6Wg0œ}\",\"target\":\"CustomComponent-h0NSI\",\"targetHandle\":\"{œfieldNameœ:œdependenciesœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-y6Wg0{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-y6Wg0œ}-CustomComponent-h0NSI{œfieldNameœ:œdependenciesœ,œidœ:œCustomComponent-h0NSIœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-y6Wg0\"},\"targetHandle\":{\"fieldName\":\"dependencies\",\"id\":\"CustomComponent-h0NSI\",\"inputTypes\":null,\"type\":\"Document\"}}},{\"source\":\"CustomComponent-h0NSI\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-h0NSIœ}\",\"target\":\"RecursiveCharacterTextSplitter-1eaOX\",\"targetHandle\":\"{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-h0NSI{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-h0NSIœ}-RecursiveCharacterTextSplitter-1eaOX{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-h0NSI\"},\"targetHandle\":{\"fieldName\":\"documents\",\"id\":\"RecursiveCharacterTextSplitter-1eaOX\",\"inputTypes\":null,\"type\":\"Document\"}},\"selected\":false},{\"source\":\"OpenAIEmbeddings-cduOO\",\"sourceHandle\":\"{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-cduOOœ}\",\"target\":\"Chroma-gVhy7\",\"targetHandle\":\"{œfieldNameœ:œembeddingœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-OpenAIEmbeddings-cduOO{œbaseClassesœ:[œEmbeddingsœ,œOpenAIEmbeddingsœ],œdataTypeœ:œOpenAIEmbeddingsœ,œidœ:œOpenAIEmbeddings-cduOOœ}-Chroma-gVhy7{œfieldNameœ:œembeddingœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œEmbeddingsœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Embeddings\",\"OpenAIEmbeddings\"],\"dataType\":\"OpenAIEmbeddings\",\"id\":\"OpenAIEmbeddings-cduOO\"},\"targetHandle\":{\"fieldName\":\"embedding\",\"id\":\"Chroma-gVhy7\",\"inputTypes\":null,\"type\":\"Embeddings\"}}},{\"source\":\"RecursiveCharacterTextSplitter-1eaOX\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ}\",\"target\":\"Chroma-gVhy7\",\"targetHandle\":\"{œfieldNameœ:œdocumentsœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-RecursiveCharacterTextSplitter-1eaOX{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-1eaOXœ}-Chroma-gVhy7{œfieldNameœ:œdocumentsœ,œidœ:œChroma-gVhy7œ,œinputTypesœ:null,œtypeœ:œDocumentœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"RecursiveCharacterTextSplitter\",\"id\":\"RecursiveCharacterTextSplitter-1eaOX\"},\"targetHandle\":{\"fieldName\":\"documents\",\"id\":\"Chroma-gVhy7\",\"inputTypes\":null,\"type\":\"Document\"}}},{\"source\":\"ChatOpenAI-q61Y2\",\"sourceHandle\":\"{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}\",\"target\":\"CombineDocsChain-yk0JF\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-yk0JFœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-q61Y2{œbaseClassesœ:[œChatOpenAIœ,œBaseChatModelœ,œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-q61Y2œ}-CombineDocsChain-yk0JF{œfieldNameœ:œllmœ,œidœ:œCombineDocsChain-yk0JFœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-q61Y2\"},\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"CombineDocsChain-yk0JF\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"}}},{\"source\":\"CombineDocsChain-yk0JF\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œfunctionœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-yk0JFœ}\",\"target\":\"RetrievalQA-4PmTB\",\"targetHandle\":\"{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"
stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CombineDocsChain-yk0JF{œbaseClassesœ:[œBaseCombineDocumentsChainœ,œfunctionœ],œdataTypeœ:œCombineDocsChainœ,œidœ:œCombineDocsChain-yk0JFœ}-RetrievalQA-4PmTB{œfieldNameœ:œcombine_documents_chainœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseCombineDocumentsChainœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"BaseCombineDocumentsChain\",\"function\"],\"dataType\":\"CombineDocsChain\",\"id\":\"CombineDocsChain-yk0JF\"},\"targetHandle\":{\"fieldName\":\"combine_documents_chain\",\"id\":\"RetrievalQA-4PmTB\",\"inputTypes\":null,\"type\":\"BaseCombineDocumentsChain\"}}},{\"source\":\"Chroma-gVhy7\",\"sourceHandle\":\"{œbaseClassesœ:[œVectorStoreœ,œChromaœ,œBaseRetrieverœ,œVectorStoreRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-gVhy7œ}\",\"target\":\"RetrievalQA-4PmTB\",\"targetHandle\":\"{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}\",\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-Chroma-gVhy7{œbaseClassesœ:[œVectorStoreœ,œChromaœ,œBaseRetrieverœ,œVectorStoreRetrieverœ],œdataTypeœ:œChromaœ,œidœ:œChroma-gVhy7œ}-RetrievalQA-4PmTB{œfieldNameœ:œretrieverœ,œidœ:œRetrievalQA-4PmTBœ,œinputTypesœ:null,œtypeœ:œBaseRetrieverœ}\",\"data\":{\"sourceHandle\":{\"baseClasses\":[\"VectorStore\",\"Chroma\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"dataType\":\"Chroma\",\"id\":\"Chroma-gVhy7\"},\"targetHandle\":{\"fieldName\":\"retriever\",\"id\":\"RetrievalQA-4PmTB\",\"inputTypes\":null,\"type\":\"BaseRetriever\"}}}],\"viewport\":{\"x\":189.54413265004274,\"y\":259.89949657174975,\"zoom\":0.291027508374689}},\"is_component\":false,\"updated_at\":\"2023-12-04T22:59:08.830269\",\"folder\":null,\"id\":\"bc7eb94b-6fc6-49cb-9b19-35347ba51afa\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Web Scraper - Content & Links\",\"description\":\"Fetch and 
parse text and links from a given URL.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":338,\"id\":\"LLMChain-H7PBy\",\"type\":\"genericNode\",\"position\":{\"x\":1682.494010974207,\"y\":275.5701585623092},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-H7PBy\"},\"selected\":false,\"positionAbsolute\":{\"x\":1682.494010974207,\"y\":275.5701585623092},\"dragging\":false},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-VSAdc\",\"type\":\"genericNode\",\"position\":{\"x\":1002.4263147022411,\"y\":-198.2305006886859},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required
\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false,\"value\":\"\"},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":
false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"0.1\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-VSAdc\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":1002.4263147022411,\"y\":-198.2305006886859}},{\"width\":384,\"height\":374,\"data\":{\"id\":\"GroupNode-WXPMk\",\"type\":\"PromptTemplate\",\"node\":{\"output_types\":[],\"display_name\":\"Web Scraper\",\"documentation\":\"\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"description\":\"Fetch and parse text and links from a given URL.\",\"template\":{\"code_CustomComponent-f6nOg\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return 
markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"code\"},\"display_name\":\"Code\"},\"ignore_links_CustomComponent-f6nOg\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"ignore_links\"}},\"code_CustomComponent-FGzJJ\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-FGzJJ\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-0XtUN\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return 
soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-0XtUN\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-ODIcp\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-ODIcp\",\"field\":\"code\"},\"display_name\":\"Code\"},\"output_parser_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"output_parser\"},\"display_name\":\"Output Parser\"},\"input_types_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_types\"},\"display_name\":\"Input 
Types\"},\"input_variables_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"],\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_variables\"},\"display_name\":\"Input Variables\"},\"partial_variables_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"partial_variables\"},\"display_name\":\"Partial Variables\"},\"template_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser query:\\n\\n{query}\\n\\nAnswer:\\n\",\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template\"},\"display_name\":\"Template\"},\"template_format_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template_format\"},\"display_name\":\"Template 
Format\"},\"validate_template_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"validate_template\"},\"display_name\":\"Validate Template\"},\"query_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"give me links\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"query\"}},\"code_CustomComponent-OACE0\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"code\"},\"display_name\":\"Code\"},\"url_CustomComponent-OACE0\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"url\"},\"value\":\"https://paperswithcode.com/\"},\"code_CustomComponent-whsQ5\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = 
\\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-whsQ5\",\"field\":\"code\"},\"display_name\":\"Code\"}},\"flow\":{\"data\":{\"nodes\":[{\"width\":384,\"height\":405,\"id\":\"CustomComponent-f6nOg\",\"type\":\"genericNode\",\"position\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"ignore_links\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Converts HTML content to Markdown 
format.\",\"base_classes\":[\"str\"],\"display_name\":\"HTML to Markdown\",\"custom_fields\":{\"html_content\":null,\"ignore_links\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-f6nOg\"},\"selected\":true,\"positionAbsolute\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"dragging\":false},{\"width\":384,\"height\":375,\"id\":\"CustomComponent-FGzJJ\",\"type\":\"genericNode\",\"position\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Data\",\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Fetches HTML content from a specified URL.\",\"base_classes\":[\"Data\"],\"display_name\":\"Fetch 
HTML\",\"custom_fields\":{\"url\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-FGzJJ\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-0XtUN\",\"type\":\"genericNode\",\"position\":{\"x\":214.00059169497604,\"y\":177.27071061129823},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Parses HTML content using Beautiful Soup.\",\"base_classes\":[\"Data\"],\"display_name\":\"Parse 
HTML\",\"custom_fields\":{\"html_content\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-0XtUN\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":214.00059169497604,\"y\":177.27071061129823}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-ODIcp\",\"type\":\"genericNode\",\"position\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"soup\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"soup\",\"display_name\":\"soup\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Extracts hyperlinks from parsed HTML content.\",\"base_classes\":[\"list\"],\"display_name\":\"Extract 
Hyperlinks\",\"custom_fields\":{\"soup\":null},\"output_types\":[\"list\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-ODIcp\"},\"selected\":true,\"positionAbsolute\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"dragging\":false},{\"width\":384,\"height\":657,\"id\":\"PromptTemplate-H9Udy\",\"type\":\"genericNode\",\"position\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser 
query:\\n\\n{query}\\n\\nAnswer:\\n\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_markdown\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_markdown\",\"display_name\":\"html_markdown\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_links\",\"display_name\":\"html_links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-H9Udy\"},\"selected\":true,\"positionAbsolute\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"dragging\":false},{\"width\":384,\"height\":347,\"id\":\"CustomComponent-OACE0\",\"type\":\"genericNode\",\"position\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":null,\"base_classes\":[\"str\"],\"display_name\":\"URL 
Input\",\"custom_fields\":{\"url\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-OACE0\"},\"selected\":true,\"positionAbsolute\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"dragging\":false},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-whsQ5\",\"type\":\"genericNode\",\"position\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"items\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"items\",\"display_name\":\"items\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"list\",\"list\":true}},\"description\":\"Converts a Python list into Markdown bullet points.\",\"base_classes\":[\"str\"],\"display_name\":\"List to 
Bullets\",\"custom_fields\":{\"items\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-whsQ5\"},\"selected\":true,\"positionAbsolute\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"dragging\":false}],\"edges\":[{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-f6nOg\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-f6nOg\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-f6nOg{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-0XtUN\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-0XtUN\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-0XtUN{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-0XtUN\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}\",\"target\":\"CustomComponent-ODIcp\",\"targetHandle\":\"{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"soup\",\"id\":\"CustomComponent-ODIcp\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-0XtUN\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-0XtUN{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}-CustomComponent-ODIcp{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"CustomComponent-FGzJJ\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"CustomComponent-FGzJJ\",\"inputTypes\":[\"Data\",\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-CustomComponent-FGzJJ{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-PromptTemplate-H9Udy{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-ODIcp\",\"sourceHandle\":\"{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}\",\"target\":\"CustomComponent-whsQ5\",\"targetHandle\":\"{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"items\",\"id\":\"CustomComponent-whsQ5\",\"inputTypes\":null,\"type\":\"list\"},\"sourceHandle\":{\"baseClasses\":[\"list\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-ODIcp\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-ODIcp{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}-CustomComponent-whsQ5{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"selected\":true},{\"source\":\"CustomComponent-whsQ5\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_links\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-whsQ5\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-whsQ5{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-f6nOg\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_markdown\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-f6nOg\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-f6nOg{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true}],\"viewport\":{\"x\":343.0346131188585,\"y\":341.89843948642147,\"zoom\":0.24148408223121196}},\"is_component\":false,\"name\":\"Fluffy Ptolemy\",\"description\":\"\",\"id\":\"W5oNW\"}}},\"id\":\"GroupNode-WXPMk\",\"position\":{\"x\":873.0737834322758,\"y\":598.8401015776466},\"type\":\"genericNode\",\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":873.0737834322758,\"y\":598.8401015776466}}],\"edges\":[{\"source\":\"ChatOpenAI-VSAdc\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VSAdcœ}\",\"target\":\"LLMChain-H7PBy\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-H7PBy\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-VSAdc\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-ChatOpenAI-VSAdc{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-VSAdcœ}-LLMChain-H7PBy{œfieldNameœ:œllmœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\"},{\"source\":\"GroupNode-WXPMk\",\"sourceHandle\":\"{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œGroupNode-WXPMkœ}\",\"target\":\"LLMChain-H7PBy\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-H7PBy\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"GroupNode-WXPMk\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-GroupNode-WXPMk{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œGroupNode-WXPMkœ}-LLMChain-H7PBy{œfieldNameœ:œpromptœ,œidœ:œLLMChain-H7PByœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":-348.6161822813733,\"y\":121.40545291211492,\"zoom\":0.6801854262029781}},\"is_component\":false,\"updated_at\":\"2023-12-04T23:22:27.784647\",\"folder\":null,\"id\":\"efc3bf27-3cf1-4561-9a83-3df347572440\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sad Joliot\",\"description\":\"Navigate the Networks of 
Conversation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":338,\"id\":\"LLMChain-RQsU1\",\"type\":\"genericNode\",\"position\":{\"x\":514.4440482813261,\"y\":528.164086188516},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, 
memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-RQsU1\"},\"selected\":false,\"positionAbsolute\":{\"x\":514.4440482813261,\"y\":528.164086188516}},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-NTTcv\",\"type\":\"genericNode\",\"position\":{\"x\":-221.33853765781578,\"y\":-35.48749923706055},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placehol
der\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\
":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"0.1\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-NTTcv\"},\"selected\":false,\"positionAbsolute\":{\"x\":-221.33853765781578,\"y\":-35.48749923706055}},{\"width\":384,\"height\":374,\"id\":\"PromptTemplate-VELMV\",\"type\":\"genericNode\",\"position\":{\"x\":-222,\"y\":682.560723386973},\"data\":{\"id\":\"PromptTemplate-VELMV\",\"type\":\"PromptTemplate\",\"node\":{\"output_types\":[],\"display_name\":\"Web Scraper\",\"documentation\":\"\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"description\":\"Fetch and parse text and links from a given URL.\",\"template\":{\"code_CustomComponent-f6nOg\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return 
markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"code\"},\"display_name\":\"Code\"},\"ignore_links_CustomComponent-f6nOg\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-f6nOg\",\"field\":\"ignore_links\"}},\"code_CustomComponent-FGzJJ\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-FGzJJ\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-0XtUN\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return 
soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-0XtUN\",\"field\":\"code\"},\"display_name\":\"Code\"},\"code_CustomComponent-ODIcp\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-ODIcp\",\"field\":\"code\"},\"display_name\":\"Code\"},\"output_parser_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"output_parser\"},\"display_name\":\"Output Parser\"},\"input_types_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_types\"},\"display_name\":\"Input 
Types\"},\"input_variables_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"],\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"input_variables\"},\"display_name\":\"Input Variables\"},\"partial_variables_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"partial_variables\"},\"display_name\":\"Partial Variables\"},\"template_PromptTemplate-H9Udy\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser query:\\n\\n{query}\\n\\nAnswer:\\n\",\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template\"},\"display_name\":\"Template\"},\"template_format_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"template_format\"},\"display_name\":\"Template 
Format\"},\"validate_template_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"validate_template\"},\"display_name\":\"Validate Template\"},\"query_PromptTemplate-H9Udy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"PromptTemplate-H9Udy\",\"field\":\"query\"}},\"code_CustomComponent-OACE0\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"code\"},\"display_name\":\"Code\"},\"url_CustomComponent-OACE0\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-OACE0\",\"field\":\"url\"},\"value\":\"https://paperswithcode.com/\"},\"code_CustomComponent-whsQ5\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a 
Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"proxy\":{\"id\":\"CustomComponent-whsQ5\",\"field\":\"code\"},\"display_name\":\"Code\"}},\"flow\":{\"data\":{\"nodes\":[{\"width\":384,\"height\":405,\"id\":\"CustomComponent-f6nOg\",\"type\":\"genericNode\",\"position\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"ignore_links\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Converts HTML content to Markdown 
format.\",\"base_classes\":[\"str\"],\"display_name\":\"HTML to Markdown\",\"custom_fields\":{\"html_content\":null,\"ignore_links\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-f6nOg\"},\"selected\":true,\"positionAbsolute\":{\"x\":665.2835942536854,\"y\":-371.7823429271119},\"dragging\":false},{\"width\":384,\"height\":375,\"id\":\"CustomComponent-FGzJJ\",\"type\":\"genericNode\",\"position\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Data\",\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Fetches HTML content from a specified URL.\",\"base_classes\":[\"Data\"],\"display_name\":\"Fetch 
HTML\",\"custom_fields\":{\"url\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-FGzJJ\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-464.4553400967736,\"y\":-225.62715888255525}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-0XtUN\",\"type\":\"genericNode\",\"position\":{\"x\":214.00059169497604,\"y\":177.27071061129823},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Parses HTML content using Beautiful Soup.\",\"base_classes\":[\"Data\"],\"display_name\":\"Parse 
HTML\",\"custom_fields\":{\"html_content\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-0XtUN\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":214.00059169497604,\"y\":177.27071061129823}},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-ODIcp\",\"type\":\"genericNode\",\"position\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"soup\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"soup\",\"display_name\":\"soup\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Extracts hyperlinks from parsed HTML content.\",\"base_classes\":[\"list\"],\"display_name\":\"Extract 
Hyperlinks\",\"custom_fields\":{\"soup\":null},\"output_types\":[\"list\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-ODIcp\"},\"selected\":true,\"positionAbsolute\":{\"x\":845.0502195222412,\"y\":366.54344452019404},\"dragging\":false},{\"width\":384,\"height\":657,\"id\":\"PromptTemplate-H9Udy\",\"type\":\"genericNode\",\"position\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from {url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser 
query:\\n\\n{query}\\n\\nAnswer:\\n\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_markdown\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_markdown\",\"display_name\":\"html_markdown\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_links\",\"display_name\":\"html_links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-H9Udy\"},\"selected\":true,\"positionAbsolute\":{\"x\":2230.389721706283,\"y\":584.4905083765256},\"dragging\":false},{\"width\":384,\"height\":347,\"id\":\"CustomComponent-OACE0\",\"type\":\"genericNode\",\"position\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":null,\"base_classes\":[\"str\"],\"display_name\":\"URL 
Input\",\"custom_fields\":{\"url\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-OACE0\"},\"selected\":true,\"positionAbsolute\":{\"x\":-1155.9497945157625,\"y\":198.13583204151553},\"dragging\":false},{\"width\":384,\"height\":329,\"id\":\"CustomComponent-whsQ5\",\"type\":\"genericNode\",\"position\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"items\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"items\",\"display_name\":\"items\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"list\",\"list\":true}},\"description\":\"Converts a Python list into Markdown bullet points.\",\"base_classes\":[\"str\"],\"display_name\":\"List to 
Bullets\",\"custom_fields\":{\"items\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-whsQ5\"},\"selected\":true,\"positionAbsolute\":{\"x\":1396.5574076376327,\"y\":694.8308714574463},\"dragging\":false}],\"edges\":[{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-f6nOg\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-f6nOg\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-f6nOg{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-f6nOgœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-FGzJJ\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}\",\"target\":\"CustomComponent-0XtUN\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-0XtUN\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-FGzJJ\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-FGzJJ{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-FGzJJœ}-CustomComponent-0XtUN{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-0XtUNœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-0XtUN\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}\",\"target\":\"CustomComponent-ODIcp\",\"targetHandle\":\"{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"soup\",\"id\":\"CustomComponent-ODIcp\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-0XtUN\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-0XtUN{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-0XtUNœ}-CustomComponent-ODIcp{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-ODIcpœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"CustomComponent-FGzJJ\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"CustomComponent-FGzJJ\",\"inputTypes\":[\"Data\",\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-CustomComponent-FGzJJ{œfieldNameœ:œurlœ,œidœ:œCustomComponent-FGzJJœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-OACE0\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-OACE0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-OACE0{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-OACE0œ}-PromptTemplate-H9Udy{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-ODIcp\",\"sourceHandle\":\"{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}\",\"target\":\"CustomComponent-whsQ5\",\"targetHandle\":\"{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"items\",\"id\":\"CustomComponent-whsQ5\",\"inputTypes\":null,\"type\":\"list\"},\"sourceHandle\":{\"baseClasses\":[\"list\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-ODIcp\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-ODIcp{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ODIcpœ}-CustomComponent-whsQ5{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-whsQ5œ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"selected\":true},{\"source\":\"CustomComponent-whsQ5\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_links\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-whsQ5\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-whsQ5{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-whsQ5œ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true},{\"source\":\"CustomComponent-f6nOg\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}\",\"target\":\"PromptTemplate-H9Udy\",\"targetHandle\":\"{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_markdown\",\"id\":\"PromptTemplate-H9Udy\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-f6nOg\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-foreground 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-f6nOg{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-f6nOgœ}-PromptTemplate-H9Udy{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-H9Udyœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":true}],\"viewport\":{\"x\":343.0346131188585,\"y\":341.89843948642147,\"zoom\":0.24148408223121196}},\"is_component\":false,\"name\":\"Fluffy Ptolemy\",\"description\":\"\",\"id\":\"W5oNW\"}}},\"selected\":false,\"positionAbsolute\":{\"x\":-222,\"y\":682.560723386973}}],\"edges\":[{\"source\":\"ChatOpenAI-NTTcv\",\"target\":\"LLMChain-RQsU1\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-NTTcvœ}\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"id\":\"reactflow__edge-ChatOpenAI-NTTcv{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-NTTcvœ}-LLMChain-RQsU1{œfieldNameœ:œllmœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-RQsU1\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-NTTcv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
\",\"animated\":false,\"selected\":false},{\"source\":\"PromptTemplate-VELMV\",\"target\":\"LLMChain-RQsU1\",\"sourceHandle\":\"{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-VELMVœ}\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"id\":\"reactflow__edge-PromptTemplate-VELMV{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-VELMVœ}-LLMChain-RQsU1{œfieldNameœ:œpromptœ,œidœ:œLLMChain-RQsU1œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-RQsU1\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-VELMV\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 \",\"animated\":false,\"selected\":false}],\"viewport\":{\"x\":298.29888454238517,\"y\":96.95765543775741,\"zoom\":0.5140569133280332}},\"is_component\":false,\"updated_at\":\"2023-12-04T23:23:08.584967\",\"folder\":null,\"id\":\"5df79f1d-f592-4d59-8c0f-9f3c2f6465b1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Pasteur\",\"description\":\"Nurture NLP Nodes Here.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":338,\"id\":\"LLMChain-cHel8\",\"type\":\"genericNode\",\"position\":{\"x\":547.3876889720291,\"y\":299.2057833881307},\"data\":{\"type\":\"LLMChain\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n 
Chain,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable]:\\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false}},\"description\":\"Chain to run queries against 
LLMs\",\"base_classes\":[\"Chain\",\"Callable\"],\"display_name\":\"LLMChain\",\"custom_fields\":{\"llm\":null,\"memory\":null,\"prompt\":null},\"output_types\":[\"LLMChain\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"LLMChain-cHel8\"},\"selected\":false,\"positionAbsolute\":{\"x\":547.3876889720291,\"y\":299.2057833881307},\"dragging\":false},{\"width\":384,\"height\":626,\"id\":\"ChatOpenAI-LA6y0\",\"type\":\"genericNode\",\"position\":{\"x\":-272.94405331278074,\"y\":-603.148171441675},\"data\":{\"type\":\"ChatOpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,
\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"password\":false,\"options\":[\"gpt-4-1106-preview\",\"gpt-4\",\"gpt-4-32k\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"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\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"0.1\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\
"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models API.\",\"base_classes\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"ChatOpenAI-LA6y0\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":-272.94405331278074,\"y\":-603.148171441675}},{\"width\":384,\"height\":404,\"id\":\"CustomComponent-ZNoRM\",\"type\":\"genericNode\",\"position\":{\"x\":-103.65741264289085,\"y\":134.62332404764658},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport html2text\\n\\nclass ConvertHTMLToMarkdown(CustomComponent):\\n display_name: str = \\\"HTML to Markdown\\\"\\n description: str = \\\"Converts HTML content to Markdown format.\\\"\\n\\n def build(self, html_content: Data, ignore_links: bool = False) -> str:\\n converter = html2text.HTML2Text()\\n converter.ignore_links = ignore_links\\n markdown_text = converter.handle(html_content)\\n self.status = markdown_text\\n return 
markdown_text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"ignore_links\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"ignore_links\",\"display_name\":\"ignore_links\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Converts HTML content to Markdown format.\",\"base_classes\":[\"str\"],\"display_name\":\"HTML to Markdown\",\"custom_fields\":{\"html_content\":null,\"ignore_links\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-ZNoRM\"},\"selected\":false,\"positionAbsolute\":{\"x\":-103.65741264289085,\"y\":134.62332404764658},\"dragging\":false},{\"width\":384,\"height\":374,\"id\":\"CustomComponent-zNSTv\",\"type\":\"genericNode\",\"position\":{\"x\":-1233.3963469933499,\"y\":280.77850809220325},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"Fetch HTML\\\"\\n description: str = \\\"Fetches HTML content from a specified URL.\\\"\\n \\n def build_config(self):\\n return {\\\"url\\\": {\\\"input_types\\\": [\\\"Data\\\", \\\"str\\\"]}} \\n\\n def build(self, url: str) -> Data:\\n response = requests.get(url)\\n self.status = str(response) + \\\"\\\\n\\\\n\\\" + response.text\\n return 
response.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Data\",\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Fetches HTML content from a specified URL.\",\"base_classes\":[\"Data\"],\"display_name\":\"Fetch HTML\",\"custom_fields\":{\"url\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-zNSTv\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":-1233.3963469933499,\"y\":280.77850809220325}},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-Ihm2o\",\"type\":\"genericNode\",\"position\":{\"x\":-554.9404152016002,\"y\":683.6763775860567},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from bs4 import BeautifulSoup\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ParseHTMLContent(CustomComponent):\\n display_name: str = \\\"Parse HTML\\\"\\n description: str = \\\"Parses HTML content using Beautiful Soup.\\\"\\n\\n def build(self, html_content: Data) -> Data:\\n soup = BeautifulSoup(html_content, 'html.parser')\\n self.status = soup\\n return soup\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"html_content\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"html_content\",\"display_name\":\"html_content\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Parses 
HTML content using Beautiful Soup.\",\"base_classes\":[\"Data\"],\"display_name\":\"Parse HTML\",\"custom_fields\":{\"html_content\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-Ihm2o\"},\"selected\":false,\"dragging\":false,\"positionAbsolute\":{\"x\":-554.9404152016002,\"y\":683.6763775860567}},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-6ST9l\",\"type\":\"genericNode\",\"position\":{\"x\":76.10921262566495,\"y\":872.9491114949525},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\nclass ExtractHyperlinks(CustomComponent):\\n display_name: str = \\\"Extract Hyperlinks\\\"\\n description: str = \\\"Extracts hyperlinks from parsed HTML content.\\\"\\n\\n def build(self, soup: Data) -> list:\\n links = [link.get('href') for link in soup.find_all('a')]\\n self.status = links\\n return links\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"soup\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"soup\",\"display_name\":\"soup\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Extracts hyperlinks from parsed HTML content.\",\"base_classes\":[\"list\"],\"display_name\":\"Extract 
Hyperlinks\",\"custom_fields\":{\"soup\":null},\"output_types\":[\"list\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-6ST9l\"},\"selected\":false,\"positionAbsolute\":{\"x\":76.10921262566495,\"y\":872.9491114949525},\"dragging\":false},{\"width\":384,\"height\":654,\"id\":\"PromptTemplate-l35W1\",\"type\":\"genericNode\",\"position\":{\"x\":1461.4487148097069,\"y\":1090.896175351284},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false,\"display_name\":\"output_parser\"},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"display_name\":\"input_types\"},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"url\",\"html_markdown\",\"html_links\",\"query\"],\"display_name\":\"input_variables\"},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false,\"display_name\":\"partial_variables\"},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"Below is the HTML content (as markdown) and hyperlinks extracted from 
{url}\\n\\n---\\n\\nContent:\\n\\n{html_markdown}\\n\\n---\\n\\nLinks:\\n\\n{html_links}\\n\\n---\\n\\nAnswer the user query as best as possible.\\n\\nUser query:\\n\\n{query}\\n\\nAnswer:\\n\",\"display_name\":\"template\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false,\"display_name\":\"template_format\"},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false,\"display_name\":\"validate_template\"},\"_type\":\"PromptTemplate\",\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_markdown\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_markdown\",\"display_name\":\"html_markdown\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"html_links\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"html_links\",\"display_name\":\"html_links\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dy
namic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"url\",\"html_markdown\",\"html_links\",\"query\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-l35W1\"},\"selected\":false,\"positionAbsolute\":{\"x\":1461.4487148097069,\"y\":1090.896175351284},\"dragging\":false},{\"width\":384,\"height\":346,\"id\":\"CustomComponent-iTLf4\",\"type\":\"genericNode\",\"position\":{\"x\":-1924.8908014123388,\"y\":704.5414990162741},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nimport requests\\n\\nclass FetchHTMLContent(CustomComponent):\\n display_name: str = \\\"URL Input\\\"\\n\\n def build(self, url: str) -> str:\\n return url\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"https://paperswithcode.com/\"}},\"description\":null,\"base_classes\":[\"str\"],\"display_name\":\"URL 
Input\",\"custom_fields\":{\"url\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-iTLf4\"},\"selected\":false,\"positionAbsolute\":{\"x\":-1924.8908014123388,\"y\":704.5414990162741},\"dragging\":false},{\"width\":384,\"height\":328,\"id\":\"CustomComponent-jWLUz\",\"type\":\"genericNode\",\"position\":{\"x\":627.6164007410564,\"y\":1201.2365384322047},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nclass ListToMarkdownBullets(CustomComponent):\\n display_name: str = \\\"List to Bullets\\\"\\n description: str = \\\"Converts a Python list into Markdown bullet points.\\\"\\n\\n def build(self, items: list) -> str:\\n markdown_bullets = '\\\\n'.join(f'* {item}' for item in items)\\n return markdown_bullets\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false,\"display_name\":\"code\"},\"_type\":\"CustomComponent\",\"items\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"items\",\"display_name\":\"items\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"list\",\"list\":true}},\"description\":\"Converts a Python list into Markdown bullet points.\",\"base_classes\":[\"str\"],\"display_name\":\"List to 
Bullets\",\"custom_fields\":{\"items\":null},\"output_types\":[\"str\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"CustomComponent-jWLUz\"},\"selected\":false,\"positionAbsolute\":{\"x\":627.6164007410564,\"y\":1201.2365384322047},\"dragging\":false}],\"edges\":[{\"source\":\"ChatOpenAI-LA6y0\",\"target\":\"LLMChain-cHel8\",\"sourceHandle\":\"{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-LA6y0œ}\",\"targetHandle\":\"{œfieldNameœ:œllmœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"id\":\"reactflow__edge-ChatOpenAI-LA6y0{œbaseClassesœ:[œBaseLanguageModelœ,œChatOpenAIœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-LA6y0œ}-LLMChain-cHel8{œfieldNameœ:œllmœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"llm\",\"id\":\"LLMChain-cHel8\",\"inputTypes\":null,\"type\":\"BaseLanguageModel\"},\"sourceHandle\":{\"baseClasses\":[\"BaseLanguageModel\",\"ChatOpenAI\",\"BaseChatModel\",\"BaseLLM\"],\"dataType\":\"ChatOpenAI\",\"id\":\"ChatOpenAI-LA6y0\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"selected\":false},{\"source\":\"CustomComponent-zNSTv\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}\",\"target\":\"CustomComponent-ZNoRM\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-ZNoRMœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-ZNoRM\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-zNSTv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-zNSTv{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}-CustomComponent-ZNoRM{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-ZNoRMœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":false},{\"source\":\"CustomComponent-zNSTv\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}\",\"target\":\"CustomComponent-Ihm2o\",\"targetHandle\":\"{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-Ihm2oœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_content\",\"id\":\"CustomComponent-Ihm2o\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-zNSTv\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-zNSTv{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-zNSTvœ}-CustomComponent-Ihm2o{œfieldNameœ:œhtml_contentœ,œidœ:œCustomComponent-Ihm2oœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":false},{\"source\":\"CustomComponent-Ihm2o\",\"sourceHandle\":\"{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-Ihm2oœ}\",\"target\":\"CustomComponent-6ST9l\",\"targetHandle\":\"{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-6ST9lœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"soup\",\"id\":\"CustomComponent-6ST9l\",\"inputTypes\":null,\"type\":\"Data\"},\"sourceHandle\":{\"baseClasses\":[\"Data\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-Ihm2o\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-Ihm2o{œbaseClassesœ:[œDataœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-Ihm2oœ}-CustomComponent-6ST9l{œfieldNameœ:œsoupœ,œidœ:œCustomComponent-6ST9lœ,œinputTypesœ:null,œtypeœ:œDataœ}\",\"selected\":false},{\"source\":\"CustomComponent-iTLf4\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}\",\"target\":\"CustomComponent-zNSTv\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œCustomComponent-zNSTvœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"CustomComponent-zNSTv\",\"inputTypes\":[\"Data\",\"str\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-iTLf4\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-iTLf4{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}-CustomComponent-zNSTv{œfieldNameœ:œurlœ,œidœ:œCustomComponent-zNSTvœ,œinputTypesœ:[œDataœ,œstrœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"CustomComponent-iTLf4\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}\",\"target\":\"PromptTemplate-l35W1\",\"targetHandle\":\"{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"url\",\"id\":\"PromptTemplate-l35W1\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-iTLf4\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-iTLf4{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-iTLf4œ}-PromptTemplate-l35W1{œfieldNameœ:œurlœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"CustomComponent-6ST9l\",\"sourceHandle\":\"{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-6ST9lœ}\",\"target\":\"CustomComponent-jWLUz\",\"targetHandle\":\"{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-jWLUzœ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"items\",\"id\":\"CustomComponent-jWLUz\",\"inputTypes\":null,\"type\":\"list\"},\"sourceHandle\":{\"baseClasses\":[\"list\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-6ST9l\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-6ST9l{œbaseClassesœ:[œlistœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-6ST9lœ}-CustomComponent-jWLUz{œfieldNameœ:œitemsœ,œidœ:œCustomComponent-jWLUzœ,œinputTypesœ:null,œtypeœ:œlistœ}\",\"selected\":false},{\"source\":\"CustomComponent-jWLUz\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-jWLUzœ}\",\"target\":\"PromptTemplate-l35W1\",\"targetHandle\":\"{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_links\",\"id\":\"PromptTemplate-l35W1\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-jWLUz\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-jWLUz{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-jWLUzœ}-PromptTemplate-l35W1{œfieldNameœ:œhtml_linksœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"CustomComponent-ZNoRM\",\"sourceHandle\":\"{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ZNoRMœ}\",\"target\":\"PromptTemplate-l35W1\",\"targetHandle\":\"{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"html_markdown\",\"id\":\"PromptTemplate-l35W1\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"str\"],\"dataType\":\"CustomComponent\",\"id\":\"CustomComponent-ZNoRM\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-CustomComponent-ZNoRM{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-ZNoRMœ}-PromptTemplate-l35W1{œfieldNameœ:œhtml_markdownœ,œidœ:œPromptTemplate-l35W1œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"selected\":false},{\"source\":\"PromptTemplate-l35W1\",\"sourceHandle\":\"{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-l35W1œ}\",\"target\":\"LLMChain-cHel8\",\"targetHandle\":\"{œfieldNameœ:œpromptœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"prompt\",\"id\":\"LLMChain-cHel8\",\"inputTypes\":null,\"type\":\"BasePromptTemplate\"},\"sourceHandle\":{\"baseClasses\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"dataType\":\"PromptTemplate\",\"id\":\"PromptTemplate-l35W1\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-PromptTemplate-mJqEg{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-mJqEgœ}-LLMChain-cHel8{œfieldNameœ:œpromptœ,œidœ:œLLMChain-cHel8œ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}\"}],\"viewport\":{\"x\":206.6159172940935,\"y\":79.94375811155385,\"zoom\":0.5140569133280333}},\"is_component\":false,\"updated_at\":\"2023-12-06T00:23:04.515144\",\"folder\":null,\"id\":\"ecfb377a-7e88-405d-8d66-7560735ce446\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Lovelace\",\"description\":\"Smart Chains, Smarter Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-04T23:43:25.925753\",\"folder\":null,\"id\":\"30e71767-6128-40e4-9a6d-b9197b679971\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Custom Component\",\"description\":\"Create any custom component you want!\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Custom Component\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Data) -> Data:\\n return 
param\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"param\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"param\",\"display_name\":\"Parameter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false}},\"description\":\"Create any custom component you want!\",\"base_classes\":[\"Data\"],\"display_name\":\"Custom Component\",\"custom_fields\":{\"param\":null},\"output_types\":[\"CustomComponent\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-IxJqc\"},\"id\":\"CustomComponent-IxJqc\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-05T22:08:27.080204\",\"folder\":null,\"id\":\"f3c56abb-96d8-4a09-80d3-f60181661b3c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazing Wilson\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-05T23:49:58.272677\",\"folder\":null,\"id\":\"324499a6-17a6-49de-96b1-b88955ed2c3d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nauseous Kowalevski\",\"description\":\"Create, Curate, Communicate with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:27.084387\",\"folder\":null,\"id\":\"e3168485-d31b-472a-907f-faf833bf7824\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Fermi\",\"description\":\"Craft Meaningful Interactions, Generate 
Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.134463\",\"folder\":null,\"id\":\"d0ecea5d-bcf9-4b8e-88f0-3c243d309336\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Friendly Ardinghelli\",\"description\":\"Smart Chains, Smarter Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.138184\",\"folder\":null,\"id\":\"a406912d-b0e2-4954-bef3-ee80c29eca3c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Easley\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.162908\",\"folder\":null,\"id\":\"57b32155-4c6e-4823-ad03-8dd09d8abc62\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Varahamihira\",\"description\":\"Create, Chain, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.135644\",\"folder\":null,\"id\":\"18daa0ff-2e5d-457a-95d7-710affec5c4d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jubilant Joliot\",\"description\":\"Language Architect at Work!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.139496\",\"folder\":null,\"id\":\"9255e938-280e-4ce7-91cd-8622126a7d02\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Adoring Carroll\",\"description\":\"Nurture NLP Nodes 
Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:45:57.162240\",\"folder\":null,\"id\":\"a7a680b9-30b1-4438-ac24-da597de443aa\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Berserk Herschel\",\"description\":\"Your Hub for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:46:02.593504\",\"folder\":null,\"id\":\"37a439b0-7b1d-45e3-b4fa-5c73669bae3b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Joliot\",\"description\":\"Conversational Cartography Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:04.845238\",\"folder\":null,\"id\":\"4d23fd0a-8ad5-46b9-bb99-eed4138e7426\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Happy Babbage\",\"description\":\"Flow into the Future of Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:41.899665\",\"folder\":null,\"id\":\"1c8ad5b9-5524-41fe-a762-52c75126b832\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Adoring Brown\",\"description\":\"Unlock the Power of AI in Your Business Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.702031\",\"folder\":null,\"id\":\"9d4c8e79-4904-4a51-a4bb-146c3d8db10e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Focused Mahavira\",\"description\":\"Design, Develop, 
Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.786331\",\"folder\":null,\"id\":\"4ba370d1-1f8e-4a23-99aa-99e2cd9b7cbf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mad Gates\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.838989\",\"folder\":null,\"id\":\"992c3cd3-1d79-4986-8c04-88a34130fa30\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Serene Mcclintock\",\"description\":\"Unlock the Power of AI in Your Business Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.932723\",\"folder\":null,\"id\":\"a65bfd13-44df-4090-aca0-5057e21e9997\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Berserk Feynman\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:42.931359\",\"folder\":null,\"id\":\"7a05dac5-c005-411d-9994-19d61e71ce78\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sprightly Perlman\",\"description\":\"Interactive Language Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:43.054687\",\"folder\":null,\"id\":\"6db24541-7211-48e6-a792-1a4a99a0ef90\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jolly Colden\",\"description\":\"Flow into the Future of 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:47:43.078384\",\"folder\":null,\"id\":\"13ac42e8-9124-4bf4-9ea2-28671ef2d9a4\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Kaku\",\"description\":\"The Power of Language at Your Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:50:33.156776\",\"folder\":null,\"id\":\"79262d75-5c62-4b14-b067-f4297f5c912f\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jovial Khayyam\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:51:13.295375\",\"folder\":null,\"id\":\"3b397e74-d8be-4728-9d6c-05f7c78106a7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Mccarthy\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:51:27.632685\",\"folder\":null,\"id\":\"13f4e1fd-45eb-4271-92fd-0d70a31c61d2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Lalande\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:51:54.628983\",\"folder\":null,\"id\":\"9cfd60d8-9311-47b0-b71b-f488f1940bc7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Romantic Mirzakhani\",\"description\":\"Innovation in Interaction with 
Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:52:52.622698\",\"folder\":null,\"id\":\"0d849403-0f75-455d-b4e4-0d622ee3305a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grinning Brown\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:53:06.056636\",\"folder\":null,\"id\":\"8643ba14-52d6-4d36-9981-5fd37de5dd76\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Kickass Watt\",\"description\":\"Transform Your Business with Smart Dialogues.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:53:23.338727\",\"folder\":null,\"id\":\"818d3066-bf08-4bf9-adcd-739f8abbfa5d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Bose\",\"description\":\"Unravel the Art of Articulation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:53:41.218861\",\"folder\":null,\"id\":\"28eff43e-0ecf-47bf-9851-f1492589978e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Optimistic Jennings\",\"description\":\"Create Powerful Connections, Boost Business Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:55:03.681106\",\"folder\":null,\"id\":\"68f59069-bc2a-464f-a983-4b61e32e01af\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pedantic Mirzakhani\",\"description\":\"Language Chainlink 
Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:55:31.761949\",\"folder\":null,\"id\":\"5d981c0e-81b3-44cc-a5be-8f55b92bfed5\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Peppy Murdock\",\"description\":\"Create Powerful Connections, Boost Business Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:56:45.548598\",\"folder\":null,\"id\":\"e812ad47-47e8-422b-b94c-84fd0263c9c8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Berserk Fermat\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:58:50.234270\",\"folder\":null,\"id\":\"82aaf449-e737-4699-9360-929ab6108dc7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tiny Albattani\",\"description\":\"Your Toolkit for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T22:59:53.946035\",\"folder\":null,\"id\":\"097cb080-274b-40ca-8dde-7900a949568a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cranky Kirch\",\"description\":\"Sculpting Language with Precision.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:00:51.745350\",\"folder\":null,\"id\":\"837d7b5f-8495-46a9-b00e-ad1b7ebb52f4\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Kowalevski\",\"description\":\"Empowering Language 
Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:02:53.336102\",\"folder\":null,\"id\":\"3ff079c2-d9f9-4da6-a22a-423fa35670ff\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Radiant Stallman\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:03:28.268357\",\"folder\":null,\"id\":\"c8b204dd-3d57-4bc8-aa13-1d559672e75b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grinning Swirles\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:03:51.194455\",\"folder\":null,\"id\":\"a097f083-5880-4cf7-986b-51898bc68e11\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dreamy Williams\",\"description\":\"Interactive Language Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:04:36.678251\",\"folder\":null,\"id\":\"d9585f63-ce76-42ce-87bc-8e69601ea5c6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Zany Hawking\",\"description\":\"Flow into the Future of Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:05:13.672479\",\"folder\":null,\"id\":\"612a01d5-8c8d-4cc1-8c54-35626b7f4a6d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazing Kilby\",\"description\":\"Your Toolkit for Text 
Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:10:37.047955\",\"folder\":null,\"id\":\"2d05a598-3cdf-452a-bd5d-b0e569e562e3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Insane Cori\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:18:12.426578\",\"folder\":null,\"id\":\"d1769a4a-c1e9-4d74-9273-1e76cfcf21f3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Compassionate Tesla\",\"description\":\"Graph Your Way to Great Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:18:52.806238\",\"folder\":null,\"id\":\"28c5d9dd-0466-4c80-9e58-b1e061cd358d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jubilant Goldstine\",\"description\":\"Harness the Power of Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-06T23:21:44.488510\",\"folder\":null,\"id\":\"b3c57e4f-28a1-4580-bf08-a9484bdd66e8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elegant Curie\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:23:47.610237\",\"folder\":null,\"id\":\"28fb024e-6ba1-4f5b-83b3-4742b3b8117c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Chandrasekhar\",\"description\":\"Maximize Impact with Intelligent 
Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:24:29.824530\",\"folder\":null,\"id\":\"d2b87cf7-9167-4030-8e9f-b8d9aa0cadee\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Nobel\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:26:51.210699\",\"folder\":null,\"id\":\"c2c9fbac-7d97-40c2-8055-dff608df9414\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elated Edison\",\"description\":\"Advanced NLP for Groundbreaking Business Solutions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:27:35.822482\",\"folder\":null,\"id\":\"a55561cd-4b25-473f-bde5-884cbabb0d24\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dazzling Lichterman\",\"description\":\"Building Linguistic Labyrinths.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:29:09.259381\",\"folder\":null,\"id\":\"9c6fe6d4-8311-4fc6-8b02-2a816d7c059d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Bhaskara\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:31:36.053452\",\"folder\":null,\"id\":\"ef6fc1ab-aff8-45a1-8cd5-bc8e38565e4d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Watt\",\"description\":\"Building Intelligent 
Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:32:26.765250\",\"folder\":null,\"id\":\"499b5351-9c09-4934-9f9d-a24be2fd8b24\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jolly Wien\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:33:23.648859\",\"folder\":null,\"id\":\"a57eda91-7f6e-410d-9990-385fe0c724f3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nostalgic Spence\",\"description\":\"Mapping Meaningful Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:34:22.576407\",\"folder\":null,\"id\":\"685f685e-8a1b-4b7e-9e21-6cdd72163c91\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Comical Fermat\",\"description\":\"Create, Connect, Converse.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:35:44.920540\",\"folder\":null,\"id\":\"3e061766-b834-4fa4-ba9c-b060925d9703\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Volta\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:36:33.001572\",\"folder\":null,\"id\":\"135f2fd9-e005-40bc-a9bf-c25107388415\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Davinci\",\"description\":\"Create Powerful Connections, Boost Business 
Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:37:16.208823\",\"folder\":null,\"id\":\"167cdb24-7e1c-475b-893a-cca2f125426c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hilarious Sinoussi\",\"description\":\"Language Chainlink Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:38:07.215401\",\"folder\":null,\"id\":\"48113cab-2c04-493f-8c2e-c8d067826aa2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Sagan\",\"description\":\"Connect the Dots, Craft Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:39:19.479656\",\"folder\":null,\"id\":\"28d292dc-e094-4ffe-a657-178892933267\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cranky Hoover\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:40:02.895859\",\"folder\":null,\"id\":\"0c9849b7-b2d6-4d0e-8334-abfb3ae183dd\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Mendeleev\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:40:27.867247\",\"folder\":null,\"id\":\"d78c52e9-1941-4555-9bb9-abd01f176705\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Dubinsky\",\"description\":\"Where Language Meets 
Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:41:23.177402\",\"folder\":null,\"id\":\"5277a04c-b5da-4597-aaa2-a6b66ea11d02\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Poitras\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:42:08.731865\",\"folder\":null,\"id\":\"b0d0c8de-4eea-484a-a068-b13e63f7e71c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pedantic Ptolemy\",\"description\":\"Crafting Conversations, One Node at a Time.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:42:25.658996\",\"folder\":null,\"id\":\"b6f155e2-03eb-4232-9bab-145463382fe9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Loving Cray\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:43:57.530112\",\"folder\":null,\"id\":\"b75c10ca-9ecb-432b-88ab-e1847e836e22\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Pasteur\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:44:58.979393\",\"folder\":null,\"id\":\"3710eccb-e7a7-41d5-9339-d9c301515d17\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Gates\",\"description\":\"The Power of Language at Your 
Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:45:42.744174\",\"folder\":null,\"id\":\"fdd2271e-92f6-4349-8c01-2ec0e9e73f13\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Determined Khorana\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:46:10.084684\",\"folder\":null,\"id\":\"88b49a97-0888-4fea-8d9b-6ac2cc6d158e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Booth\",\"description\":\"Design Dialogues with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:46:41.577750\",\"folder\":null,\"id\":\"629afe54-8796-45be-a570-e3ac79c60792\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jubilant Mendeleev\",\"description\":\"Chain the Words, Master Language!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:47:22.631243\",\"folder\":null,\"id\":\"7bf481b0-73fe-4f5b-a3d4-1263d9d8e827\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Clever Varahamihira\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:49:51.026960\",\"folder\":null,\"id\":\"df9e86b6-56c9-4848-9010-102615314766\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sleepy Stallman\",\"description\":\"Empowering Language 
Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:50:14.932236\",\"folder\":null,\"id\":\"3e66994c-9b7a-4b85-a917-65d1959d7352\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Modest Wozniak\",\"description\":\"Advanced NLP for Groundbreaking Business Solutions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:51:13.811894\",\"folder\":null,\"id\":\"d10f924a-5780-4255-9f41-3e102ae03e84\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hopeful Mirzakhani\",\"description\":\"Graph Your Way to Great Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:52:23.906908\",\"folder\":null,\"id\":\"f3378fa8-ccaf-4a3b-90d6-b8ab0c4e481f\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Awesome Joule\",\"description\":\"Design, Develop, Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:52:43.863440\",\"folder\":null,\"id\":\"deaa50e7-a8b1-46b1-856c-334ee781e1c2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Peppy Shockley\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:53:43.299699\",\"folder\":null,\"id\":\"c72eac2c-d924-40c6-a102-da524216d090\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Zany Dewey\",\"description\":\"Flow into the Future of 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:54:18.848827\",\"folder\":null,\"id\":\"d9425324-bb60-462e-b431-90a536f2bc76\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gloomy Joliot\",\"description\":\"Language Engineering Excellence.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:12.348608\",\"folder\":null,\"id\":\"621ba134-3fac-487c-98cd-96941439f1be\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Backstabbing Franklin\",\"description\":\"Craft Language Connections Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.491824\",\"folder\":null,\"id\":\"86ca9773-c7b7-4a1a-859a-6cbe0ddff206\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sharp Swartz\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.524414\",\"folder\":null,\"id\":\"da1c02b7-d608-4498-9946-7d02f55fa103\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Serene Volta\",\"description\":\"Connect the Dots, Craft Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.641432\",\"folder\":null,\"id\":\"8d5dd998-6b51-4f65-8331-086a7f3b11d7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Focused Lichterman\",\"description\":\"Crafting Dialogues that Drive Business 
Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.746120\",\"folder\":null,\"id\":\"942027d3-e2ea-48c6-8279-0a41b54e8862\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Einstein\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.746904\",\"folder\":null,\"id\":\"9e9b6298-1073-4297-8ecc-3c620b432e70\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Planck\",\"description\":\"Empowering Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.747420\",\"folder\":null,\"id\":\"7cd60c83-b797-4e60-af6d-cbc540657943\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Suspicious Zobell\",\"description\":\"Innovation in Interaction, Revolution in Revenue.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:58:13.749951\",\"folder\":null,\"id\":\"dab08306-9521-4e15-aa11-e6a6a4e210f8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Knuth\",\"description\":\"Nurture NLP Nodes Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:16.772119\",\"folder\":null,\"id\":\"c9149590-636a-44f5-aaae-45a4e78fe4df\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Wright\",\"description\":\"Unfolding Linguistic 
Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:52.954568\",\"folder\":null,\"id\":\"6b23b2ad-c07c-46f6-b9ad-268783d1712e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Vibrant Lalande\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.656156\",\"folder\":null,\"id\":\"ece3bcf6-a147-4559-862e-cacff9db5f48\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Happy Gauss\",\"description\":\"The Pinnacle of Prompt Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.717991\",\"folder\":null,\"id\":\"252b6021-ecad-4eaf-9e2f-106c4c89c496\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gigantic Rosalind\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.736261\",\"folder\":null,\"id\":\"b8cb6d8d-c0fb-4e8d-a46e-2c608dc8a714\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Adoring Hubble\",\"description\":\"Powerful Prompts, Perfectly Positioned.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.769546\",\"folder\":null,\"id\":\"553a67db-7225-474c-978e-8a40cde2bfb2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Mclean\",\"description\":\"Unlock the Power of AI in Your Business 
Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.981063\",\"folder\":null,\"id\":\"e0865007-4d80-4edf-87ab-9e8d2892d719\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Trusting Murdock\",\"description\":\"Uncover Business Opportunities with NLP.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.982286\",\"folder\":null,\"id\":\"1021cd20-66e0-4b81-9c79-bfe729774d20\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Perky Riemann\",\"description\":\"Powerful Prompts, Perfectly Positioned.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T00:59:53.983216\",\"folder\":null,\"id\":\"089074d3-8a1e-4d85-a59d-b4717090e4d3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Tesla\",\"description\":\"Language Models, Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:00:35.704142\",\"folder\":null,\"id\":\"beb49d88-255e-4db4-931b-4ab4358f1097\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Boyd\",\"description\":\"Smart Chains, Smarter Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:01:13.569507\",\"folder\":null,\"id\":\"75b5ab8d-e0c0-43cf-912b-8578550e198a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Babbage\",\"description\":\"Interactive Language 
Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:01:27.944868\",\"folder\":null,\"id\":\"503edd55-8f70-43e5-87fb-2324eaf62336\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Bose\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:02:40.122079\",\"folder\":null,\"id\":\"ae4f2992-1a23-4a43-bec6-68b823935762\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Coulomb\",\"description\":\"Craft Meaningful Interactions, Generate Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.263237\",\"folder\":null,\"id\":\"a2a464db-b02a-4440-ad9e-7b552ee6c027\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Drunk Newton\",\"description\":\"Craft Meaningful Interactions, Generate Value.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.883766\",\"folder\":null,\"id\":\"1a5d9af7-5a96-4035-a09c-e15741785828\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Jovial Pasteur\",\"description\":\"Your Hub for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.933083\",\"folder\":null,\"id\":\"04b94873-0828-41dc-a850-fd4132c9b9f1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Playful Spence\",\"description\":\"Connect the Dots, Craft 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.967685\",\"folder\":null,\"id\":\"47003dc2-7884-48a3-aa66-e4185079f4d9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cheerful Noyce\",\"description\":\"Your Passport to Linguistic Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:15.983198\",\"folder\":null,\"id\":\"13769cb4-2e15-4d54-a28a-c97dc15db58c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Awesome Edison\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:16.018879\",\"folder\":null,\"id\":\"453dacde-6b10-406b-a5b3-f90f44be6899\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Upbeat Snyder\",\"description\":\"Powerful Prompts, Perfectly Positioned.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:16.205689\",\"folder\":null,\"id\":\"9544fac9-3002-47aa-86b9-102844fe9649\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Khorana\",\"description\":\"Chain the Words, Master Language!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:03:16.243434\",\"folder\":null,\"id\":\"90498ef6-34f9-45c8-8cd0-fe6a36a26f41\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elated Albattani\",\"description\":\"Interactive Language 
Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:05:07.775497\",\"folder\":null,\"id\":\"9577c416-8ce8-48f6-ad6d-ab2e003bb415\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Charming Goodall\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:07:52.139318\",\"folder\":null,\"id\":\"f10dc08e-b9c7-44b3-8837-b95aee2f6dbb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hilarious Ramanujan\",\"description\":\"Harness the Power of Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:08:22.448480\",\"folder\":null,\"id\":\"c864bd8c-67cd-465f-bf7d-7a35c7df37f3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Mclean\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:09:56.507826\",\"folder\":null,\"id\":\"f0c13b19-ae23-40e6-88d6-d135897ee100\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Hawking\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:10:27.169757\",\"folder\":null,\"id\":\"0afb91b4-8f41-4270-900a-f5de647d45ad\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gloomy Lovelace\",\"description\":\"Your Passport to Linguistic 
Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:11:02.292233\",\"folder\":null,\"id\":\"0f1e5dcf-8769-4157-b495-5f215b490107\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Fervent Kilby\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:11:46.960966\",\"folder\":null,\"id\":\"310d03d9-dd50-4946-9a27-38ee06906212\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Admiring Almeida\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:12:24.475101\",\"folder\":null,\"id\":\"3cbc8fc2-a86f-4177-9a1c-a833b2a24283\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Elated Roentgen\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:06.753571\",\"folder\":null,\"id\":\"c508e922-29e9-4234-84ae-505c5bdf41c1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Perky Poitras\",\"description\":\"Chain the Words, Master Language!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.754605\",\"folder\":null,\"id\":\"1431df05-1b6f-41af-a063-a18d26a946ef\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Khayyam\",\"description\":\"Navigate the Linguistic Landscape, Discover 
Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.791627\",\"folder\":null,\"id\":\"344e03fb-fd49-4e87-be67-7dce04ba655b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Zealous Mayer\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.803889\",\"folder\":null,\"id\":\"8b3ff1cb-3a6c-46ee-b09a-0e9f9f13a8b9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tiny Ramanujan\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.822583\",\"folder\":null,\"id\":\"18961e76-f4b1-4968-926a-fed22cb04f69\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cheerful Franklin\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.854440\",\"folder\":null,\"id\":\"0e0ee854-ab46-4333-a848-2e1239a24334\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Serene Swirles\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:07.988932\",\"folder\":null,\"id\":\"cc7b8238-3d15-4f78-bd0c-8311691c9ff8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sleepy Swartz\",\"description\":\"Uncover Business Opportunities with 
NLP.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:08.028688\",\"folder\":null,\"id\":\"123369f9-c83c-4ed3-93b6-78ca29c271cf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cranky Kowalevski\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.097607\",\"folder\":null,\"id\":\"ed4b1490-e9e5-46bf-b337-166b48eaadd6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sprightly Golick\",\"description\":\"Building Powerful Solutions with Language Models.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.617772\",\"folder\":null,\"id\":\"9d1be311-c618-4e3e-aeb1-4161ab37850e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Boring Newton\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.646124\",\"folder\":null,\"id\":\"63a75f99-1745-40d3-9e27-3d19a143be45\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sprightly Noyce\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.685781\",\"folder\":null,\"id\":\"b418ca87-eb17-42e8-b789-3fcb0cab3ddb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Fluffy Fermat\",\"description\":\"Text Generation Meets Business 
Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.705984\",\"folder\":null,\"id\":\"93802b07-eee9-4a2b-8691-7d9a231bd67e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Stoic Payne\",\"description\":\"Beyond Text Generation - Unleashing Business Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.723990\",\"folder\":null,\"id\":\"d62df661-0ae5-4b41-a9fb-71cb2e46ad52\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Darwin\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.818343\",\"folder\":null,\"id\":\"d1f62248-415c-474a-bfa6-3509a528a33b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Focused Thompson\",\"description\":\"Transform Your Business with Smart Dialogues.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:13:59.925999\",\"folder\":null,\"id\":\"d2267176-f020-4c52-90a4-7f944d7c1749\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Admiring Dewey\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:07.400402\",\"folder\":null,\"id\":\"8cf1ac8d-d34d-4e8a-a9da-be44672e1dfb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Prickly Zuse\",\"description\":\"Where Language Meets 
Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.384611\",\"folder\":null,\"id\":\"bae3cb5b-0f1b-4e95-bf58-7eab38da3d73\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hungry Zuse\",\"description\":\"The Pinnacle of Prompt Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.436591\",\"folder\":null,\"id\":\"4dfafe3e-e9ef-405d-be72-550084411d69\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Khayyam\",\"description\":\"Design Dialogues with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.435958\",\"folder\":null,\"id\":\"e0f81215-dc55-4b5a-b8cf-6e2fcbf297a7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Big Mendel\",\"description\":\"Innovation in Interaction with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.470080\",\"folder\":null,\"id\":\"5022a71c-da47-4975-a4d0-6d2d9e760d3d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Inquisitive Poitras\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.484430\",\"folder\":null,\"id\":\"4f1ff9e3-3500-404c-80af-2010bc46cdcb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Joyous Jones\",\"description\":\"The Power of Language at Your 
Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.661306\",\"folder\":null,\"id\":\"77af3e47-c2cc-42f6-99e1-78589439a447\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Exuberant Khayyam\",\"description\":\"Empowering Communication, Enabling Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:15:08.662877\",\"folder\":null,\"id\":\"6f3e2e56-b329-47e3-86cc-024c29203016\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dazzling Visvesvaraya\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.092917\",\"folder\":null,\"id\":\"449ac0ee-ee29-4a9f-9aff-fd6a86624457\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nostalgic Tesla\",\"description\":\"Nurture NLP Nodes Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.630382\",\"folder\":null,\"id\":\"a2136ed3-dc75-4a8f-ab34-6887ff955b23\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Noether\",\"description\":\"Language Models, Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.652058\",\"folder\":null,\"id\":\"fd1080f8-db07-481a-b2ba-60f67fcb20a6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Dazzling Pasteur\",\"description\":\"Language Models, 
Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.688661\",\"folder\":null,\"id\":\"d534d5e1-92aa-4fb2-a795-7071c4feba47\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Comical Sinoussi\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.741385\",\"folder\":null,\"id\":\"85323170-c066-4d8f-acb0-1b142241389e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Small Ramanujan\",\"description\":\"Uncover Business Opportunities with NLP.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:10.790086\",\"folder\":null,\"id\":\"b0d18432-21ab-404b-acb6-57ef97353fad\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sick Joliot\",\"description\":\"Text Generation Meets Business 
Transformation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":442,\"id\":\"OpenAIEmbeddings-rVj1B\",\"type\":\"genericNode\",\"position\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187},\"data\":{\"type\":\"OpenAIEmbeddings\",\"node\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\
":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":true,\"name\":\"tiktoken_enabled\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAIEmbeddings-rVj1B\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187}}],\"edges\":[],\"viewport\":{\"x\":267.7156633365312,\"y\":716.9644817529361,\"zoom\":0.7169776240079139}},\"is_component\":false,\"updated_at\":\"2023-12-08T18:52:26.999440\",\"folder\":null,\"id\":\"be39958a-ef42-4fa8-8e54-b611e56b5c97\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Graceful Lumiere\",\"description\":\"Conversational Cartography Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-07T01:16:11.012069\",\"folder\":null,\"id\":\"f7dcecfd-533c-4f40-9dcb-f213962ed1a2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"aaaaa\",\"description\":\"Text Generation Meets Business 
Transformation.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":442,\"id\":\"OpenAIEmbeddings-P6Z0D\",\"type\":\"genericNode\",\"position\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187},\"data\":{\"type\":\"OpenAIEmbeddings\",\"node\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\
":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":true,\"name\":\"tiktoken_enabled\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAIEmbeddings-P6Z0D\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-100.23754663035719,\"y\":-718.7575880388187}}],\"edges\":[],\"viewport\":{\"x\":266.7156633365312,\"y\":657.9644817529361,\"zoom\":0.7169776240079139}},\"is_component\":false,\"updated_at\":\"2023-12-08T19:05:14.503144\",\"folder\":null,\"id\":\"c2411a20-57c6-44cc-a0d0-2c857453633d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Aryabhata\",\"description\":\"Design, Develop, Dialogize.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":459,\"id\":\"CustomComponent-Qhbd7\",\"type\":\"genericNode\",\"position\":{\"x\":-224.36198532285903,\"y\":-39.03047722134913},\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\nfrom openai import OpenAI\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"OpenAI STT english translator\\\"\\n description: str = \\\"Transcript and translate any audio to english\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"audio_path\\\":{\\\"display_name\\\":\\\"Audio path\\\",\\\"input_types\\\":[\\\"str\\\"]},\\\"openAI_key\\\":{\\\"display_name\\\":\\\"OpenAI key\\\",\\\"password\\\":True}}\\n\\n def build(self,audio_path:str,openAI_key:str) -> str:\\n client = OpenAI(api_key=openAI_key)\\n audio_file= open(audio_path, \\\"rb\\\")\\n transcript = client.audio.translations.create(\\n model=\\\"whisper-1\\\", \\n file=audio_file\\n )\\n self.status 
= transcript.text\\n return transcript.text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"audio_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"audio_path\",\"display_name\":\"Audio path\",\"advanced\":false,\"input_types\":[\"str\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"aaaaa\"},\"openAI_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openAI_key\",\"display_name\":\"OpenAI key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"}},\"description\":\"Transcript and translate any audio to english\",\"base_classes\":[\"str\"],\"display_name\":\"OpenAI STT english tra\",\"custom_fields\":{\"audio_path\":null,\"openAI_key\":null},\"output_types\":[\"str\"],\"documentation\":\"http://docs.langflow.org/components/custom\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-Qhbd7\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-224.36198532285903,\"y\":-39.03047722134913}}],\"edges\":[],\"viewport\":{\"x\":433.8372868055629,\"y\":250.9611989970935,\"zoom\":0.8467453123625275}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:16:56.971879\",\"folder\":null,\"id\":\"122eee5d-9734-4e51-9da5-b39bead64a8d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Wing\",\"description\":\"Your Toolkit for Text Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:23.227017\",\"folder\":null,\"id\":\"171d0063-6446-4c6a-8f5b-786a38951d44\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mad Boyd\",\"description\":\"Empowering Language 
Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.063781\",\"folder\":null,\"id\":\"3a7af50c-6555-4004-a86e-1ea37e477900\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Boring Dewey\",\"description\":\"Unleashing Linguistic Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.065404\",\"folder\":null,\"id\":\"dc4b4235-a550-41e2-9ddb-bcb352a1bc03\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nauseous Carroll\",\"description\":\"Navigate the Networks of Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.087501\",\"folder\":null,\"id\":\"9550e2bf-db7a-41f5-84e5-177a181bbeda\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Engelbart\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.112543\",\"folder\":null,\"id\":\"6a3a24bb-01cb-4d8e-8d17-0dc92d257322\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sassy Khayyam\",\"description\":\"Innovation in Interaction, Revolution in Revenue.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.147631\",\"folder\":null,\"id\":\"8d372f5e-ca12-4ea8-a1a1-8846afa72692\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Bell\",\"description\":\"Connect the Dots, Craft 
Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.212921\",\"folder\":null,\"id\":\"800f8785-0f41-4db3-aef8-9e3de5250526\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sleepy Bassi\",\"description\":\"Unleashing Business Potential through Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:18:24.395729\",\"folder\":null,\"id\":\"4589607a-065b-4a8f-ba52-5045d7b04086\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lonely Volhard\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.263971\",\"folder\":null,\"id\":\"b2440ed8-44fa-4684-adf7-b5e84bff6577\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Ecstatic Poincare\",\"description\":\"Your Passport to Linguistic Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.377270\",\"folder\":null,\"id\":\"b996f514-e0b8-432f-969b-7276630a8f4b\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grave Zuse\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.406385\",\"folder\":null,\"id\":\"6e2e9c12-0afc-499e-acdd-adf4b5f7d4fc\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Big Hopper\",\"description\":\"Conversation Catalyst 
Engine.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.413014\",\"folder\":null,\"id\":\"e020f1a5-aa12-45e7-ba50-6eb64a735e60\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Jang\",\"description\":\"Conversation Catalyst Engine.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.433045\",\"folder\":null,\"id\":\"ee58f892-b7b2-408e-b4b9-9d862fc315aa\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Ohm\",\"description\":\"Promptly Ingenious!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.403404\",\"folder\":null,\"id\":\"023a1fc3-8807-4167-b6b2-f4e5daf036f1\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Degrasse\",\"description\":\"Bridging Prompts for Brilliance.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.411655\",\"folder\":null,\"id\":\"085f106f-c1e9-4a0f-ba31-d2fafe685d9c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Exuberant Volta\",\"description\":\"Catalyzing Business Growth through Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:20:33.408697\",\"folder\":null,\"id\":\"14481bb5-1353-452f-9359-d38c9419d79c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Bose\",\"description\":\"Conversational Cartography 
Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:21.774940\",\"folder\":null,\"id\":\"e9316292-4ee1-441b-8327-0b09a7831fe9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Thirsty Easley\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.416556\",\"folder\":null,\"id\":\"668806ba-3efa-44de-aeb7-4ac082ba9172\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Joyous Mestorf\",\"description\":\"Generate, Innovate, Communicate.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.484048\",\"folder\":null,\"id\":\"3fc0a371-aada-4450-9d17-33d3cc05c870\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Playful Franklin\",\"description\":\"Empowering Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.509095\",\"folder\":null,\"id\":\"af967c98-5f08-4ee2-b1ce-6c16b4f9ebe2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Bhaskara\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.508465\",\"folder\":null,\"id\":\"758c4164-b521-45d0-a15f-d49480e312eb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Funky Edison\",\"description\":\"Unravel the Art of 
Articulation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.602296\",\"folder\":null,\"id\":\"f9d3d30f-8859-433f-bafc-ccf1a7196e35\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Silly Ride\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.604061\",\"folder\":null,\"id\":\"0142bca5-eb61-42ed-9917-70c4c0f54eb0\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Goofy Noyce\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:22:22.603113\",\"folder\":null,\"id\":\"0b63d036-4669-4ceb-8ea4-34035340df77\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cocky Bhabha\",\"description\":\"Language Engineering Excellence.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.345767\",\"folder\":null,\"id\":\"8b9a66d4-a924-4b84-a2b5-5dd0645ac07a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Thirsty Zobell\",\"description\":\"Unleashing Business Potential through Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.722319\",\"folder\":null,\"id\":\"c912fd6b-b32d-409f-a0e5-c6249b066429\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Fervent Shaw\",\"description\":\"Language Architect at 
Work!\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.750779\",\"folder\":null,\"id\":\"9d755cd4-c652-43e0-a68d-75a5475ce7a3\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Giggly Newton\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.786602\",\"folder\":null,\"id\":\"0d3af7de-1ada-4c43-a69f-1bfad370ccfc\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Modest Yalow\",\"description\":\"Text Generation Meets Business Transformation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.792245\",\"folder\":null,\"id\":\"b6444376-4162-436b-8b40-f5a6afc850db\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sad Bhabha\",\"description\":\"Empowering Enterprises with Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.890349\",\"folder\":null,\"id\":\"d90af439-fb34-4d27-98f2-06f7f9a9ed8c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Spirited Hoover\",\"description\":\"The Pinnacle of Prompt Generation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:07.905750\",\"folder\":null,\"id\":\"31597ce2-de3c-490b-9ead-3f702f63cfd9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Trusting Davinci\",\"description\":\"Design, Develop, 
Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-08T21:23:08.001400\",\"folder\":null,\"id\":\"b43c63e9-a257-4a53-8acc-049e13706ac2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"23\",\"description\":\"23\",\"data\":{\"nodes\":[{\"width\":384,\"height\":467,\"id\":\"PromptTemplate-K7xiS\",\"type\":\"genericNode\",\"position\":{\"x\":-658.2250903773149,\"y\":809.352046606987},\"data\":{\"type\":\"PromptTemplate\",\"node\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true,\"value\":[\"dasdas\",\"dasdasd\"]},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false,\"value\":\"{dasdas}\\n{dasdasd}\\n\"},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false}
,\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\",\"dasdas\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"dasdas\",\"display_name\":\"dasdas\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"dasdasd\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"dasdasd\",\"display_name\":\"dasdasd\",\"advanced\":false,\"input_types\":[\"Document\",\"BaseOutputParser\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"PromptTemplate\",\"BasePromptTemplate\",\"StringPromptTemplate\"],\"name\":\"\",\"display_name\":\"PromptTemplate\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"custom_fields\":{\"\":[\"dasdas\",\"dasdasd\"]},\"output_types\":[],\"full_path\":null,\"field_formatters\":{},\"beta\":false,\"error\":null},\"id\":\"PromptTemplate-K7xiS\"},\"selected\":false,\"positionAbsolute\":{\"x\":-658.2250903773149,\"y\":809.352046606987},\"dragging\":false},{\"width\":384,\"height\":366,\"id\":\"AirbyteJSONLoader-DXfcM\",\"type\":\"genericNode\",\"position\":{\"x\":-1110.8267574563533,\"y\":569.1107380883907},\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"metadata\
":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-DXfcM\"},\"selected\":false,\"positionAbsolute\":{\"x\":-1110.8267574563533,\"y\":569.1107380883907},\"dragging\":false},{\"width\":384,\"height\":376,\"id\":\"PromptRunner-ckWMH\",\"type\":\"genericNode\",\"position\":{\"x\":-1149.4746387825978,\"y\":992.3970573758324},\"data\":{\"type\":\"PromptRunner\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.prompts import PromptTemplate\\nfrom langchain.schema import Document\\n\\n\\nclass PromptRunner(CustomComponent):\\n display_name: str = \\\"Prompt Runner\\\"\\n description: str = \\\"Run a Chain with the given PromptTemplate\\\"\\n beta: bool = True\\n field_config = {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt Template\\\",\\n \\\"info\\\": \\\"Make sure the prompt has all variables filled.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}) -> Document:\\n chain = prompt | llm\\n # The input is an empty dict because the prompt is already filled\\n result = chain.invoke(input=inputs)\\n if hasattr(result, \\\"content\\\"):\\n 
result = result.content\\n self.repr_value = result\\n return Document(page_content=str(result))\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"inputs\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"inputs\",\"display_name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt Template\",\"advanced\":false,\"dynamic\":false,\"info\":\"Make sure the prompt has all variables filled.\",\"type\":\"PromptTemplate\",\"list\":false}},\"description\":\"Run a Chain with the given PromptTemplate\",\"base_classes\":[\"Document\"],\"display_name\":\"Prompt 
Runner\",\"custom_fields\":{\"inputs\":null,\"llm\":null,\"prompt\":null},\"output_types\":[\"PromptRunner\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"id\":\"PromptRunner-ckWMH\"},\"selected\":false,\"positionAbsolute\":{\"x\":-1149.4746387825978,\"y\":992.3970573758324},\"dragging\":false}],\"edges\":[{\"source\":\"PromptRunner-ckWMH\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œPromptRunnerœ,œidœ:œPromptRunner-ckWMHœ}\",\"target\":\"PromptTemplate-K7xiS\",\"targetHandle\":\"{œfieldNameœ:œdasdasdœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"dasdasd\",\"id\":\"PromptTemplate-K7xiS\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"PromptRunner\",\"id\":\"PromptRunner-ckWMH\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-PromptRunner-ckWMH{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œPromptRunnerœ,œidœ:œPromptRunner-ckWMHœ}-PromptTemplate-K7xiS{œfieldNameœ:œdasdasdœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\"},{\"source\":\"AirbyteJSONLoader-DXfcM\",\"sourceHandle\":\"{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œAirbyteJSONLoaderœ,œidœ:œAirbyteJSONLoader-DXfcMœ}\",\"target\":\"PromptTemplate-K7xiS\",\"targetHandle\":\"{œfieldNameœ:œdasdasœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\",\"data\":{\"targetHandle\":{\"fieldName\":\"dasdas\",\"id\":\"PromptTemplate-K7xiS\",\"inputTypes\":[\"Document\",\"BaseOutputParser\"],\"type\":\"str\"},\"sourceHandle\":{\"baseClasses\":[\"Document\"],\"dataType\":\"AirbyteJSONLoader\",\"id\":\"AirbyteJSONLoader-DXfcM\"}},\"style\":{\"stroke\":\"#555\"},\"className\":\"stroke-gray-900 
stroke-connection\",\"animated\":false,\"id\":\"reactflow__edge-AirbyteJSONLoader-DXfcM{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œAirbyteJSONLoaderœ,œidœ:œAirbyteJSONLoader-DXfcMœ}-PromptTemplate-K7xiS{œfieldNameœ:œdasdasœ,œidœ:œPromptTemplate-K7xiSœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ],œtypeœ:œstrœ}\"}],\"viewport\":{\"x\":721.09842496776,\"y\":-303.59762799439625,\"zoom\":0.6417129487814537}},\"is_component\":false,\"updated_at\":\"2023-12-08T22:52:14.560323\",\"folder\":null,\"id\":\"8533c46e-21fd-4b92-b68e-1086ea86c72d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Stonebraker\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:26:42.332360\",\"folder\":null,\"id\":\"92bc0875-4a73-44f2-9410-3b8342e404bf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Metaphor Search (1)\",\"description\":\"Search in Metaphor with a custom string.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\nfrom metaphor_python import Metaphor\\nimport json\\n\\nfrom typing import List\\nfrom langflow.field_typing import Data\\n\\nclass MetaphorSearch(CustomComponent):\\n display_name: str = \\\"Metaphor Search\\\"\\n description: str = \\\"Search in Metaphor with a custom string.\\\"\\n beta = True\\n \\n def build_config(self):\\n return {\\n \\\"metaphor_client\\\": {\\\"display_name\\\": \\\"Metaphor Wrapper\\\"}, \\n \\\"search_num_results\\\": {\\\"display_name\\\": \\\"Number of 
Results (per domain)\\\"},\\n \\\"include_domains\\\": {\\\"display_name\\\": \\\"Include Domains\\\", \\\"is_list\\\": True},\\n \\\"start_date\\\": {\\\"display_name\\\": \\\"Start Date\\\"},\\n \\\"use_autoprompt\\\": {\\\"display_name\\\": \\\"Use Autoprompt\\\", \\\"type\\\": \\\"boolean\\\"},\\n \\\"search_type\\\": {\\\"display_name\\\": \\\"Search Type\\\", \\\"options\\\": [\\\"neural\\\", \\\"keyword\\\"]},\\n \\\"start_date\\\": {\\\"input_types\\\": [\\\"Data\\\"]}\\n }\\n\\n def build(\\n self,\\n methaphor_client: Data,\\n query: str,\\n search_type: str='keyword',\\n search_num_results: int = 5,\\n include_domains: List[str]= [\\\"youtube.com\\\"],\\n use_autoprompt: bool = False,\\n start_date: str=\\\"2023-01-01\\\",\\n \\n ) -> Data:\\n \\n results = []\\n for domain in include_domains:\\n response = methaphor_client.search(\\n query,\\n num_results=int(search_num_results),\\n include_domains=[domain],\\n use_autoprompt=use_autoprompt,\\n type=search_type,\\n # start_crawl_date=start_date,\\n start_published_date=start_date\\n )\\n results.extend(response.results)\\n \\n self.repr_value = results\\n\\n return results\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"include_domains\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[\"yout\"],\"password\":false,\"name\":\"include_domains\",\"display_name\":\"Include 
Domains\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"methaphor_client\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"methaphor_client\",\"display_name\":\"methaphor_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"query\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"pasteldasdasdas\"},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"Number of Results (per domain)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"keyword\",\"password\":false,\"options\":[\"neural\",\"keyword\"],\"name\":\"search_type\",\"display_name\":\"Search Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"start_date\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"start_date\",\"display_name\":\"start_date\",\"advanced\":false,\"input_types\":[\"Data\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"Use Autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Search in Metaphor with a custom string.\",\"base_classes\":[\"Data\"],\"display_name\":\"Metaphor 
Search\",\"custom_fields\":{\"include_domains\":null,\"methaphor_client\":null,\"query\":null,\"search_num_results\":null,\"search_type\":null,\"start_date\":null,\"use_autoprompt\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-dMB5d\"},\"id\":\"CustomComponent-dMB5d\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:26:43.668665\",\"folder\":null,\"id\":\"912265df-9b87-4b30-a585-1ca59b944391\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Metaphor Search (2)\",\"description\":\"Search in Metaphor with a custom string.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\nfrom metaphor_python import Metaphor\\nimport json\\n\\nfrom typing import List\\nfrom langflow.field_typing import Data\\n\\nclass MetaphorSearch(CustomComponent):\\n display_name: str = \\\"Metaphor Search\\\"\\n description: str = \\\"Search in Metaphor with a custom string.\\\"\\n beta = True\\n \\n def build_config(self):\\n return {\\n \\\"metaphor_client\\\": {\\\"display_name\\\": \\\"Metaphor Wrapper\\\"}, \\n \\\"search_num_results\\\": {\\\"display_name\\\": \\\"Number of Results (per domain)\\\"},\\n \\\"include_domains\\\": {\\\"display_name\\\": \\\"Include Domains\\\", \\\"is_list\\\": True},\\n \\\"start_date\\\": {\\\"display_name\\\": \\\"Start Date\\\"},\\n \\\"use_autoprompt\\\": {\\\"display_name\\\": \\\"Use Autoprompt\\\", \\\"type\\\": \\\"boolean\\\"},\\n \\\"search_type\\\": 
{\\\"display_name\\\": \\\"Search Type\\\", \\\"options\\\": [\\\"neural\\\", \\\"keyword\\\"]},\\n \\\"start_date\\\": {\\\"input_types\\\": [\\\"Data\\\"]}\\n }\\n\\n def build(\\n self,\\n methaphor_client: Data,\\n query: str,\\n search_type: str='keyword',\\n search_num_results: int = 5,\\n include_domains: List[str]= [\\\"youtube.com\\\"],\\n use_autoprompt: bool = False,\\n start_date: str=\\\"2023-01-01\\\",\\n \\n ) -> Data:\\n \\n results = []\\n for domain in include_domains:\\n response = methaphor_client.search(\\n query,\\n num_results=int(search_num_results),\\n include_domains=[domain],\\n use_autoprompt=use_autoprompt,\\n type=search_type,\\n # start_crawl_date=start_date,\\n start_published_date=start_date\\n )\\n results.extend(response.results)\\n \\n self.repr_value = results\\n\\n return results\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"include_domains\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[\"yout\"],\"password\":false,\"name\":\"include_domains\",\"display_name\":\"Include Domains\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"methaphor_client\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"methaphor_client\",\"display_name\":\"methaphor_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"query\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"pasteldasdasdas\"},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"Number of Results (per 
domain)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"keyword\",\"password\":false,\"options\":[\"neural\",\"keyword\"],\"name\":\"search_type\",\"display_name\":\"Search Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"start_date\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"start_date\",\"display_name\":\"start_date\",\"advanced\":false,\"input_types\":[\"Data\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"Use Autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Search in Metaphor with a custom string.\",\"base_classes\":[\"Data\"],\"display_name\":\"Metaphor Search\",\"custom_fields\":{\"include_domains\":null,\"methaphor_client\":null,\"query\":null,\"search_num_results\":null,\"search_type\":null,\"start_date\":null,\"use_autoprompt\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-ipifC\"},\"id\":\"CustomComponent-ipifC\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:26:49.799612\",\"folder\":null,\"id\":\"ca83ee08-2f93-427d-9897-80fef6dd5447\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Metaphor Search\",\"description\":\"Search in Metaphor with a custom 
string.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CustomComponent\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\nfrom metaphor_python import Metaphor\\nimport json\\n\\nfrom typing import List\\nfrom langflow.field_typing import Data\\n\\nclass MetaphorSearch(CustomComponent):\\n display_name: str = \\\"Metaphor Search\\\"\\n description: str = \\\"Search in Metaphor with a custom string.\\\"\\n beta = True\\n \\n def build_config(self):\\n return {\\n \\\"metaphor_client\\\": {\\\"display_name\\\": \\\"Metaphor Wrapper\\\"}, \\n \\\"search_num_results\\\": {\\\"display_name\\\": \\\"Number of Results (per domain)\\\"},\\n \\\"include_domains\\\": {\\\"display_name\\\": \\\"Include Domains\\\", \\\"is_list\\\": True},\\n \\\"start_date\\\": {\\\"display_name\\\": \\\"Start Date\\\"},\\n \\\"use_autoprompt\\\": {\\\"display_name\\\": \\\"Use Autoprompt\\\", \\\"type\\\": \\\"boolean\\\"},\\n \\\"search_type\\\": {\\\"display_name\\\": \\\"Search Type\\\", \\\"options\\\": [\\\"neural\\\", \\\"keyword\\\"]},\\n \\\"start_date\\\": {\\\"input_types\\\": [\\\"Data\\\"]}\\n }\\n\\n def build(\\n self,\\n methaphor_client: Data,\\n query: str,\\n search_type: str='keyword',\\n search_num_results: int = 5,\\n include_domains: List[str]= [\\\"youtube.com\\\"],\\n use_autoprompt: bool = False,\\n start_date: str=\\\"2023-01-01\\\",\\n \\n ) -> Data:\\n \\n results = []\\n for domain in include_domains:\\n response = methaphor_client.search(\\n query,\\n num_results=int(search_num_results),\\n include_domains=[domain],\\n use_autoprompt=use_autoprompt,\\n type=search_type,\\n # start_crawl_date=start_date,\\n 
start_published_date=start_date\\n )\\n results.extend(response.results)\\n \\n self.repr_value = results\\n\\n return results\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"include_domains\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[\"yout\"],\"password\":false,\"name\":\"include_domains\",\"display_name\":\"Include Domains\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"methaphor_client\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"methaphor_client\",\"display_name\":\"methaphor_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Data\",\"list\":false},\"query\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"query\",\"display_name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"pasteldasdasdas\"},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"Number of Results (per domain)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"keyword\",\"password\":false,\"options\":[\"neural\",\"keyword\"],\"name\":\"search_type\",\"display_name\":\"Search 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"start_date\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"start_date\",\"display_name\":\"start_date\",\"advanced\":false,\"input_types\":[\"Data\"],\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"Use Autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Search in Metaphor with a custom string.\",\"base_classes\":[\"Data\"],\"display_name\":\"Metaphor Search\",\"custom_fields\":{\"include_domains\":null,\"methaphor_client\":null,\"query\":null,\"search_num_results\":null,\"search_type\":null,\"start_date\":null,\"use_autoprompt\":null},\"output_types\":[\"Data\"],\"documentation\":\"\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"CustomComponent-Y4qL7\"},\"id\":\"CustomComponent-Y4qL7\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:26:53.719960\",\"folder\":null,\"id\":\"5847602b-769a-4a82-82e8-a70f54a59929\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Williams\",\"description\":\"Conversational Cartography Unlocked.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:27:45.691254\",\"folder\":null,\"id\":\"0e3bdba9-127a-4399-81a4-2865b70a4a47\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Shared Component\",\"description\":\"Conversational Cartography 
Unlocked.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":328,\"id\":\"CSVAgent-TK9Ea\",\"type\":\"genericNode\",\"position\":{\"x\":251.25514772667083,\"y\":160.7424529887874},\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null},\"id\":\"CSVAgent-TK9Ea\"},\"positionAbsolute\":{\"x\":251.25514772667083,\"y\":160.7424529887874}}],\"edges\":[],\"viewport\":{\"x\":104.85568116317398,\"y\":88.26375874183478,\"zoom\":0.7169776240079145}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:28:34.119070\",\"folder\":null,\"id\":\"29c1a247-47b0-457b-8666-7c0a67dc72b0\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"teste 
cristhian\",\"description\":\"11111\",\"data\":{\"nodes\":[{\"width\":384,\"height\":328,\"id\":\"CSVAgent-P5wrB\",\"type\":\"genericNode\",\"position\":{\"x\":251.25514772667083,\"y\":160.7424529887874},\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null},\"id\":\"CSVAgent-P5wrB\"},\"positionAbsolute\":{\"x\":251.25514772667083,\"y\":160.7424529887874}},{\"width\":384,\"height\":626,\"id\":\"OpenAI-zpihD\",\"type\":\"genericNode\",\"position\":{\"x\":-56.983202536768374,\"y\":61.715652677908665},\"data\":{\"type\":\"OpenAI\",\"node\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"list\":true},\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"
async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":20,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"best_of\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"default_query\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"http_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"m
ultiline\":false,\"password\":false,\"name\":\"http_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"logit_bias\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":2,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-babbage-001\",\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"Ope
nAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false,\"value\":\"dasdasdasdsadasdas\"},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":tr
ue},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"1.4\",\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language models.\",\"base_classes\":[\"OpenAI\",\"BaseLanguageModel\",\"BaseLLM\",\"BaseOpenAI\"],\"display_name\":\"OpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"beta\":false,\"error\":null},\"id\":\"OpenAI-zpihD\"},\"selected\":true,\"dragging\":false,\"positionAbsolute\":{\"x\":-56.983202536768374,\"y\":61.715652677908665}}],\"edges\":[],\"viewport\":{\"x\":104.85568116317398,\"y\":88.26375874183478,\"zoom\":0.7169776240079145}},\"is_component\":false,\"updated_at\":\"2023-12-09T13:40:48.453096\",\"folder\":null,\"id\":\"2e59d013-2acb-49a6-915b-9231a7e6eb58\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVAgent (1)\",\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVAgent-jsHqy\"},\"id\":\"CSVAgent-jsHqy\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:31:17.317322\",\"folder\":null,\"id\":\"ab1034a9-9b5b-4c65-bf6e-9f098a403942\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Suspicious Wilsonfasdfsd\",\"description\":\"Building Linguistic Labyrinths.fasdfads\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T14:43:45.298121\",\"folder\":null,\"id\":\"7d91c0c5-fba6-4c60-b4d1-11d430ef357a\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (1)\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-pAHh6\"},\"id\":\"AirbyteJSONLoader-pAHh6\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:47:58.573137\",\"folder\":null,\"id\":\"f0cc4292-97cc-4748-803d-949e30dcf661\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"ff2\":\"d3bbd\"},{\"w\":\"bvd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-0zU2Q\"},\"id\":\"AirbyteJSONLoader-0zU2Q\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:50:09.035318\",\"folder\":null,\"id\":\"5e3c0d55-1a00-4e15-9821-0c625c6415ff\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVAgent\",\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVAgent\",\"node\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVAgent-Ub1Xe\"},\"id\":\"CSVAgent-Ub1Xe\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:50:16.985419\",\"folder\":null,\"id\":\"baee8061-4788-4ce6-928f-c6ce1ecb443c\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Heisenberg\",\"description\":\"Maximize Impact with Intelligent 
Conversations.\",\"data\":{\"nodes\":[{\"width\":384,\"height\":366,\"id\":\"CSVLoader-RMUx9\",\"type\":\"genericNode\",\"position\":{\"x\":111,\"y\":345.51250076293945},\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"z2b\":\"z9\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null},\"id\":\"CSVLoader-RMUx9\"},\"positionAbsolute\":{\"x\":111,\"y\":345.51250076293945}}],\"edges\":[],\"viewport\":{\"x\":0,\"y\":0,\"zoom\":1}},\"is_component\":false,\"updated_at\":\"2023-12-09T14:38:40.291137\",\"folder\":null,\"id\":\"109e9629-d569-4555-9d40-42203a5ed035\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CohereEmbeddings\",\"description\":\"Cohere embedding 
models.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CohereEmbeddings\",\"node\":{\"template\":{\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cohere_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false,\"value\":\"\"},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"embed-english-v2.0\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"truncate\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"truncate\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"user_agent\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"password\":false,\"name\":\"user_agent\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CohereEmbeddings\"},\"description\":\"Cohere embedding 
models.\",\"base_classes\":[\"CohereEmbeddings\",\"Embeddings\"],\"display_name\":\"CohereEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CohereEmbeddings-HFUAf\"},\"id\":\"CohereEmbeddings-HFUAf\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:50:46.172344\",\"folder\":null,\"id\":\"662d8040-d47c-40db-bda1-66489a1c9d24\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (1)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-3wrib\"},\"id\":\"CSVLoader-3wrib\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:56:11.662526\",\"folder\":null,\"id\":\"4fae6aec-ddbd-498d-a4d1-ca0290f6a5ad\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (2)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-VEjyx\"},\"id\":\"CSVLoader-VEjyx\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T13:57:37.560784\",\"folder\":null,\"id\":\"d80635ee-966c-41f2-981c-afa2c388ac6e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (3)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-PIhOc\"},\"id\":\"CSVLoader-PIhOc\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:00:27.991966\",\"folder\":null,\"id\":\"c5cc4c32-77c8-4889-a9f8-2632466b7366\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (4)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-deB43\"},\"id\":\"CSVLoader-deB43\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:00:42.509243\",\"folder\":null,\"id\":\"0ab59938-ccf4-4bb9-b285-1f5da38648f0\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-mdkLm\"},\"id\":\"CSVLoader-mdkLm\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:02:17.458354\",\"folder\":null,\"id\":\"f127b7e7-4149-492e-8998-6b1b35ec4153\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (5)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"cddscz23\":\"aaqd\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-FRc6Y\"},\"id\":\"CSVLoader-FRc6Y\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:02:26.958867\",\"folder\":null,\"id\":\"a870a096-725c-4914-add0-8d57d710b7e5\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (2)\",\"description\":\"Load local `Airbyte` json files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-Z0uol\"},\"id\":\"AirbyteJSONLoader-Z0uol\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:03:33.764117\",\"folder\":null,\"id\":\"2a37c76c-65c8-4c01-a72d-eb46f3d41a56\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS 
Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock 
Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-28ASv\"},\"id\":\"AmazonBedrockEmbeddings-28ASv\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:03:41.066618\",\"folder\":null,\"id\":\"850ba4e6-96ca-49a7-9b0c-4b7048fd52c8\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"ConversationBufferMemory\",\"description\":\"Buffer for storing conversation memory.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"ConversationBufferMemory\",\"node\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationBufferMemory\"},\"description\":\"Buffer for storing conversation memory.\",\"base_classes\":[\"BaseMemory\",\"BaseChatMemory\",\"ConversationBufferMemory\"],\"display_name\":\"ConversationBufferMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"ConversationBufferMemory-WaoLx\"},\"id\":\"ConversationBufferMemory-WaoLx\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:04:46.058568\",\"folder\":null,\"id\":\"be650940-0449-4eb0-a708-056310f924fd\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (1)\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import 
BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-Bs5PG\"},\"id\":\"AmazonBedrockEmbeddings-Bs5PG\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:05:50.570800\",\"folder\":null,\"id\":\"53ad1fe2-f3af-4354-86e0-eb141ce12859\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (2)\",\"description\":\"Embeddings model from Amazon 
Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return 
output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock 
Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-zSZ4t\"},\"id\":\"AmazonBedrockEmbeddings-zSZ4t\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:06:11.415088\",\"folder\":null,\"id\":\"e9ed1c90-146e-452d-8ccd-ebf2e0da4331\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (3)\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n 
\\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon 
Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-Bxhlt\"},\"id\":\"AmazonBedrockEmbeddings-Bxhlt\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:07:06.411108\",\"folder\":null,\"id\":\"83783c57-64e3-41a6-aa7e-ed82f80f1e53\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"CSVLoader (6)\",\"description\":\"Load a `CSV` file into a list of Documents.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"CSVLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"z2b\":\"z9\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of 
Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"CSVLoader-2pn2o\"},\"id\":\"CSVLoader-2pn2o\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:38:30.706258\",\"folder\":null,\"id\":\"c4ae9de6-9df3-4d3c-afc9-1752af4fecd7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Agent Initializer\",\"description\":\"Initialize a Langchain Agent.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AgentInitializer\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, List, Union\\n\\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\\n\\n\\nclass AgentInitializerComponent(CustomComponent):\\n display_name: str = \\\"Agent Initializer\\\"\\n description: str = \\\"Initialize a Langchain Agent.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/modules/agents/agent_types/\\\"\\n\\n def build_config(self):\\n agents = list(types.AGENT_TO_CLASS.keys())\\n # field_type and required are optional\\n return {\\n \\\"agent\\\": {\\\"options\\\": agents, \\\"value\\\": agents[0], \\\"display_name\\\": \\\"Agent Type\\\"},\\n \\\"max_iterations\\\": {\\\"display_name\\\": \\\"Max Iterations\\\", \\\"value\\\": 10},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"Language Model\\\"},\\n }\\n\\n def build(\\n self, agent: str, llm: 
BaseLanguageModel, memory: BaseChatMemory, tools: List[Tool], max_iterations: int\\n ) -> Union[AgentExecutor, Callable]:\\n agent = AgentType(agent)\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n agent=agent,\\n memory=memory,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"chat-zero-shot-react-description\",\"chat-conversational-react-description\",\"structured-chat-zero-shot-react-description\",\"openai-functions\",\"openai-multi-functions\",\"JsonAgent\",\"CSVAgent\",\"AgentInitializer\",\"VectorStoreAgent\",\"VectorStoreRouterAgent\",\"SQLAgent\"],\"name\":\"agent\",\"display_name\":\"Agent Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"Language Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"max_iterations\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_iterations\",\"display_name\":\"Max 
Iterations\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true}},\"description\":\"Initialize a Langchain Agent.\",\"base_classes\":[\"Chain\",\"AgentExecutor\",\"Callable\"],\"display_name\":\"Agent Initializer\",\"custom_fields\":{\"agent\":null,\"llm\":null,\"max_iterations\":null,\"memory\":null,\"tools\":null},\"output_types\":[\"AgentInitializer\"],\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AgentInitializer-MJrAC\"},\"id\":\"AgentInitializer-MJrAC\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:42:54.715163\",\"folder\":null,\"id\":\"ff84b5f9-5680-4084-a9f1-7d94fe675486\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Agent Initializer (1)\",\"description\":\"Initialize a Langchain Agent.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AgentInitializer\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, List, Union\\n\\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\\n\\n\\nclass AgentInitializerComponent(CustomComponent):\\n display_name: str = \\\"Agent 
Initializer\\\"\\n description: str = \\\"Initialize a Langchain Agent.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/modules/agents/agent_types/\\\"\\n\\n def build_config(self):\\n agents = list(types.AGENT_TO_CLASS.keys())\\n # field_type and required are optional\\n return {\\n \\\"agent\\\": {\\\"options\\\": agents, \\\"value\\\": agents[0], \\\"display_name\\\": \\\"Agent Type\\\"},\\n \\\"max_iterations\\\": {\\\"display_name\\\": \\\"Max Iterations\\\", \\\"value\\\": 10},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"Language Model\\\"},\\n }\\n\\n def build(\\n self, agent: str, llm: BaseLanguageModel, memory: BaseChatMemory, tools: List[Tool], max_iterations: int\\n ) -> Union[AgentExecutor, Callable]:\\n agent = AgentType(agent)\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n agent=agent,\\n memory=memory,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"chat-zero-shot-react-description\",\"chat-conversational-react-description\",\"structured-chat-zero-shot-react-description\",\"openai-functions\",\"openai-multi-functions\",\"JsonAgent\",\"CSVAgent\",\"AgentInitializer\",\"VectorStoreAgent\",\"VectorStoreRouterAgent\",\"SQLAgent\"],\"name\":\"agent\",\"display_name\":\"Agent 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"Language Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"max_iterations\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_iterations\",\"display_name\":\"Max Iterations\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true}},\"description\":\"Initialize a Langchain Agent.\",\"base_classes\":[\"Chain\",\"AgentExecutor\",\"Callable\"],\"display_name\":\"Agent Initializer\",\"custom_fields\":{\"agent\":null,\"llm\":null,\"max_iterations\":null,\"memory\":null,\"tools\":null},\"output_types\":[\"AgentInitializer\"],\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AgentInitializer-3lcZ4\"},\"id\":\"AgentInitializer-3lcZ4\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:43:20.819934\",\"folder\":null,\"id\":\"97f5db9f-d6cc-4555-88fb-3be102c67814\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Exuberant Banach\",\"description\":\"Unfolding Linguistic 
Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-09T14:52:49.951416\",\"folder\":null,\"id\":\"a600acd1-213a-4ce7-8648-ab2ee59f5918\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (3)\",\"description\":\"Load local `Airbyte` json files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-JhQtx\"},\"id\":\"AirbyteJSONLoader-JhQtx\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:44:49.587337\",\"folder\":null,\"id\":\"07c52ad7-ca52-4cdd-ab40-74a91dbad0dd\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (4)\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-bIvDc\"},\"id\":\"AirbyteJSONLoader-bIvDc\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:45:13.458500\",\"folder\":null,\"id\":\"53e4d256-3653-444b-b8e0-fb97b3ae5c7e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"AirbyteJSONLoader (5)\",\"description\":\"Load local `Airbyte` json 
files.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AirbyteJSONLoader\",\"node\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\".json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[{\"\":\"\"}],\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null,\"official\":false},\"id\":\"AirbyteJSONLoader-2BLS8\"},\"id\":\"AirbyteJSONLoader-2BLS8\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:45:44.461699\",\"folder\":null,\"id\":\"b5158cc1-c6b8-4e25-907a-bc7e7637aa38\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Amazon Bedrock Embeddings (4)\",\"description\":\"Embeddings model from Amazon Bedrock.\",\"data\":{\"edges\":[],\"nodes\":[{\"data\":{\"type\":\"AmazonBedrockEmbeddings\",\"node\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings import BedrockEmbeddings\\nfrom langchain.embeddings.base import Embeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for 
implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"credentials_profile_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"endpoint_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint 
URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"region_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"custom_fields\":{\"credentials_profile_name\":null,\"endpoint_url\":null,\"model_id\":null,\"region_name\":null},\"output_types\":[\"AmazonBedrockEmbeddings\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"beta\":true,\"error\":null,\"official\":false},\"id\":\"AmazonBedrockEmbeddings-NsBzN\"},\"id\":\"AmazonBedrockEmbeddings-NsBzN\",\"position\":{\"x\":0,\"y\":0},\"type\":\"genericNode\"}],\"viewport\":{\"x\":1,\"y\":1,\"zoom\":1}},\"is_component\":true,\"updated_at\":\"2023-12-09T14:50:29.791575\",\"folder\":null,\"id\":\"3fb77f72-1be7-4ce0-ae89-a82b0eee9acf\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Evil Golick\",\"description\":\"Where Language Meets Logic.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.707854\",\"folder\":null,\"id\":\"9c5d21c2-ea82-4cf4-a591-c3d3fd3f13e6\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Davinci (1)\",\"description\":\"Unleashing Linguistic 
Creativity.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.761150\",\"folder\":null,\"id\":\"f8e2e16e-129c-4116-9626-2c6b524d97b7\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Effervescent Degrasse\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.764440\",\"folder\":null,\"id\":\"d7f4f7b5-effe-4834-8cab-4f2fc4f6e9de\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Prickly Archimedes\",\"description\":\"Language Chainlink Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.767546\",\"folder\":null,\"id\":\"2265d813-4a87-410f-b06a-65e35db8219e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Boring Heyrovsky\",\"description\":\"Building Intelligent Interactions.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.765105\",\"folder\":null,\"id\":\"10bfd881-e67e-4c33-965d-ee041695edce\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Nostalgic Carroll\",\"description\":\"Unfolding Linguistic Possibilities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.811794\",\"folder\":null,\"id\":\"63fa5653-4513-47f2-8dfa-baeb1d981f93\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Pensive Perlman\",\"description\":\"Empowering Communication, Enabling 
Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.814993\",\"folder\":null,\"id\":\"f4bc5945-20d9-4703-a5bb-6a4a3ef7fc8e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Hilarious Stallman\",\"description\":\"Connect the Dots, Craft Language.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:19:02.813144\",\"folder\":null,\"id\":\"8d8b80f9-4b74-4cd5-bc5c-08a32c4d2b95\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Awesome Einstein\",\"description\":\"The Power of Language at Your Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:19.518262\",\"folder\":null,\"id\":\"21808fd7-a492-4e29-8863-301c2785f542\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Funky Swanson\",\"description\":\"Sculpting Language with Precision.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.185637\",\"folder\":null,\"id\":\"7752299a-4aa9-44db-8d9c-5c4a2ac1b071\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Romantic Pascal\",\"description\":\"Unlock the Power of AI in Your Business Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.223064\",\"folder\":null,\"id\":\"78f48a13-6a9f-42ce-9d58-ec9b63b381d2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Bartik\",\"description\":\"Sculpting Language with 
Precision.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.239758\",\"folder\":null,\"id\":\"38074091-3d7c-4d42-b61b-ae195c1b8a4e\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Condescending Joliot\",\"description\":\"Language Models, Unleashed.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.271996\",\"folder\":null,\"id\":\"773020aa-5c2d-4632-ad9e-21276861ab93\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Gleeful Kalam\",\"description\":\"The Power of Language at Your Fingertips.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.283929\",\"folder\":null,\"id\":\"0092d077-6d31-401f-a739-b164476b90ed\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Grinning Bhabha\",\"description\":\"Your Passport to Linguistic Landscapes.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.467739\",\"folder\":null,\"id\":\"4a395211-712d-4dd2-b3bb-e6b19200f15d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Babbage\",\"description\":\"Design Dialogues with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:20:20.742990\",\"folder\":null,\"id\":\"3f086a82-3e29-4328-9da8-e02dc9201744\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tiny Volta\",\"description\":\"Nurture NLP Nodes 
Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:24:18.594247\",\"folder\":null,\"id\":\"659b8289-c8e8-413d-9b2b-51e68411f170\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Radiant Jennings\",\"description\":\"Design, Develop, Dialogize.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:24:42.640309\",\"folder\":null,\"id\":\"f55f0640-d47e-4471-b1ba-3411d38ecac5\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Distracted Shaw\",\"description\":\"Interactive Language Weaving.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:24:58.376247\",\"folder\":null,\"id\":\"a039811e-449a-4244-83ed-4ddd731a0921\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Peppy Murdock (1)\",\"description\":\"Language Chainlink Master.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:25:14.674524\",\"folder\":null,\"id\":\"0faf6144-6ca6-4f64-a343-665ae54774ef\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Lively Volta\",\"description\":\"Language Models, Mapped and Mastered.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:26:50.418029\",\"folder\":null,\"id\":\"c5d149d0-c401-4f5e-b79f-2abcc7b8d98d\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Bubbly Curie\",\"description\":\"Navigate the Networks of 
Conversation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:27:10.900899\",\"folder\":null,\"id\":\"7bb2d226-9740-433d-861f-a499632c5a13\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Radiant Nobel\",\"description\":\"Navigate the Linguistic Landscape, Discover Opportunities.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:29:11.541630\",\"folder\":null,\"id\":\"947906d8-75ea-470c-a8e2-cc80c5d3bdbb\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Cheerful Northcutt\",\"description\":\"Unleashing Business Potential through Language Engineering.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:29:37.169791\",\"folder\":null,\"id\":\"56f109fd-2c18-42ed-a9b2-089a678a0c11\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Suspicious Khayyam\",\"description\":\"Crafting Dialogues that Drive Business Success.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:30:24.686359\",\"folder\":null,\"id\":\"551d7bfa-49aa-43f1-a779-e47eabc2aaf2\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Twinkly Spence\",\"description\":\"Create, Curate, Communicate with Langflow.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:30:47.738472\",\"folder\":null,\"id\":\"12aef128-130e-492e-bf84-62d4cb4cd456\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Mirthful Lavoisier\",\"description\":\"Nurture NLP Nodes 
Here.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:31:43.272365\",\"folder\":null,\"id\":\"9952c044-6903-4fba-a270-6ed0b90c93c9\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Upbeat Bose\",\"description\":\"Unravel the Art of Articulation.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:32:23.972035\",\"folder\":null,\"id\":\"65f49582-c9fa-4c67-a2bc-4e8fa73ca731\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Tender Perlman\",\"description\":\"Maximize Impact with Intelligent Conversations.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:33:26.984083\",\"folder\":null,\"id\":\"9ae57fe2-c677-47fa-a059-7d3d915a1178\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"},{\"name\":\"Sharp Cori\",\"description\":\"Conversation Catalyst Engine.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2023-12-11T18:33:52.377359\",\"folder\":null,\"id\":\"2920dde2-5c24-4fe0-9c06-ef86b5a16a99\",\"user_id\":\"d253bfba-6368-44dc-85f7-0d6da9e45968\"}]" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 1.309 } - }, - { - "startedDateTime": "2023-12-11T18:55:08.950Z", - "time": 0.595, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/build/2920dde2-5c24-4fe0-9c06-ef86b5a16a99/status", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer 
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJkMjUzYmZiYS02MzY4LTQ0ZGMtODVmNy0wZDZkYTllNDU5NjgiLCJleHAiOjE3MzM4NTY4OTh9.5MFFb0JCck3ITSKXbxhwO9yAscnXcwXNTV70ZYBRB20; refresh_tkn_lflw=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/flow/2920dde2-5c24-4fe0-9c06-ef86b5a16a99" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"119\", \"Not?A_Brand\";v=\"24\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "15" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Mon, 11 Dec 2023 18:55:08 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"built\":false}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.595 } - } - ] - } -} \ No newline at end of file diff --git a/src/frontend/harFiles/langflow.har b/src/frontend/harFiles/langflow.har index 
edac6806e..d6fef50cd 100644 --- a/src/frontend/harFiles/langflow.har +++ b/src/frontend/harFiles/langflow.har @@ -1,229 +1,733 @@ { - "log": { - "version": "1.2", - "creator": { - "name": "Playwright", - "version": "1.37.1" + "log": { + "version": "1.2", + "creator": { + "name": "Playwright", + "version": "1.42.0" + }, + "browser": { + "name": "chromium", + "version": "123.0.6312.4" + }, + "entries": [ + { + "startedDateTime": "2024-02-28T14:32:30.858Z", + "time": 0.77, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/version", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "19" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + 
"size": -1, + "mimeType": "application/json", + "text": "{\"version\":\"0.6.7\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.77 } }, - "browser": { - "name": "chromium", - "version": "116.0.5845.82" + { + "startedDateTime": "2024-02-28T14:32:30.859Z", + "time": 0.894, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/auto_login", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "227" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" }, + { "name": "set-cookie", "value": 
"access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc; Path=/; SameSite=none; Secure" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"access_token\":\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc\",\"refresh_token\":null,\"token_type\":\"bearer\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.894 } }, - "entries": [ - { - "startedDateTime": "2023-08-31T14:55:35.502Z", - "time": 0.727, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/auto_login", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, 
- { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "227" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Thu, 31 Aug 2023 14:55:34 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "{\"access_token\":\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk\",\"refresh_token\":null,\"token_type\":\"bearer\"}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.727 } + { + "startedDateTime": "2024-02-28T14:32:30.937Z", + "time": 0.944, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/users/whoami", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 }, - { - "startedDateTime": "2023-08-31T14:55:35.586Z", - "time": 0.719, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/flows/", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk; refresh_token=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": 
"Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "253" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"id\":\"0389fb29-daa6-408c-b8cb-b8ff8d17343a\",\"username\":\"langflow\",\"profile_image\":null,\"is_active\":true,\"is_superuser\":true,\"create_at\":\"2024-02-28T14:31:41.362911\",\"updated_at\":\"2024-02-28T14:32:30.864882\",\"last_login_at\":\"2024-02-28T14:32:30.863748\"}" }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "2" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Thu, 31 Aug 2023 14:55:34 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "[]" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.719 } + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" }, - { - "startedDateTime": "2023-08-31T14:55:35.586Z", - "time": 1.031, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/all", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer 
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk; refresh_token=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", "value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 - }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "251307" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Thu, 31 Aug 2023 14:55:34 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": 
"{\"chains\":{\"ConversationChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"_type\":\"default\"},\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLMOutputParser\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"history\",\"input\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. 
If the AI does not know the answer to a question, it truthfully says it does not know.\\n\\nCurrent conversation:\\n{history}\\nHuman: {input}\\nAI:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"input\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"llm_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"response\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_final_only\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_final_only\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationChain\"},\"description\":\"Chain 
to have a conversation and load context from memory.\",\"base_classes\":[\"LLMChain\",\"Chain\",\"ConversationChain\",\"function\"],\"display_name\":\"ConversationChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"ConversationalRetrievalChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callbacks\",\"list\":false},\"condense_question_llm\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"condense_question_llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"condense_question_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"chat_history\",\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.\\n\\nChat History:\\n{chat_history}\\nFollow Up Input: {question}\\nStandalone 
question:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"condense_question_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"chain_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"combine_docs_chain_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"combine_docs_chain_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"display_name\":\"Return source 
documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationalRetrievalChain\"},\"description\":\"Convenience method to load chain from LLM and retriever.\",\"base_classes\":[\"BaseConversationalRetrievalChain\",\"Chain\",\"ConversationalRetrievalChain\",\"function\"],\"display_name\":\"ConversationalRetrievalChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/chat_vector_db\",\"beta\":false,\"error\":null},\"LLMChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLMOutputParser\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm_kwargs\":{\"required\":false,\"placeholder
\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_final_only\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_final_only\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"LLMChain\"},\"description\":\"Chain to run queries against LLMs.\",\"base_classes\":[\"LLMChain\",\"Chain\",\"function\"],\"display_name\":\"LLMChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/foundational/llm_chain\",\"beta\":false,\"error\":null},\"LLMCheckerChain\":{\"template\":{\"check_assertions_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"assertions\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Here is a bullet point list of assertions:\\n{assertions}\\nFor each assertion, determine whether 
it is true or false. If it is false, explain why.\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"check_assertions_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"create_draft_answer_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"{question}\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"create_draft_answer_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"list_assertions_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"statement\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Here is a statement:\\n{statement}\\nMake a bullet point list of the assumptions you made when producing the above statement.\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"list_assertions_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"revised_answer_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"checked_assertions\",\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"{checked_assertions}\\n\\nQuestion: In light of the above assertions and checks, how would you answer the question 
'{question}'?\\n\\nAnswer:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"revised_answer_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"_type\":\"LLMCheckerChain\"},\"description\":\"\",\"base_classes\":[\"Chain\",\"LLMCheckerChain\",\"function\"],\"display_name\":\"LLMCheckerChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_checker\",\"beta\":false,\"error\":null},\"LLMMathChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"llm_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"LLMChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"Translate a math problem into a expression that can be executed using Python's numexpr library. 
Use the output of running this code to answer the question.\\n\\nQuestion: ${{Question with math problem.}}\\n```text\\n${{single line mathematical expression that solves the problem}}\\n```\\n...numexpr.evaluate(text)...\\n```output\\n${{Output of running the code}}\\n```\\nAnswer: ${{Answer}}\\n\\nBegin.\\n\\nQuestion: What is 37593 * 67?\\n```text\\n37593 * 67\\n```\\n...numexpr.evaluate(\\\"37593 * 67\\\")...\\n```output\\n2518731\\n```\\nAnswer: 2518731\\n\\nQuestion: 37593^(1/5)\\n```text\\n37593**(1/5)\\n```\\n...numexpr.evaluate(\\\"37593**(1/5)\\\")...\\n```output\\n8.222831614237718\\n```\\nAnswer: 8.222831614237718\\n\\nQuestion: {question}\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"question\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"answer\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":fa
lse},\"_type\":\"LLMMathChain\"},\"description\":\"Chain that interprets a prompt and executes python code to do math.\",\"base_classes\":[\"Chain\",\"LLMMathChain\",\"function\"],\"display_name\":\"LLMMathChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_math\",\"beta\":false,\"error\":null},\"RetrievalQA\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"combine_documents_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCombineDocumentsChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"query\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"result\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":f
alse,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"RetrievalQA\"},\"description\":\"Chain for question-answering against an index.\",\"base_classes\":[\"Chain\",\"RetrievalQA\",\"BaseRetrievalQA\",\"function\"],\"display_name\":\"RetrievalQA\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/vector_db_qa\",\"beta\":false,\"error\":null},\"RetrievalQAWithSourcesChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"combine_documents_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCombineDocumentsChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"ret
riever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"answer_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"answer\",\"password\":false,\"name\":\"answer_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_docs_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"docs\",\"password\":false,\"name\":\"input_docs_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_tokens_limit\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":3375,\"password\":false,\"name\":\"max_tokens_limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"question_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"question\",\"password\":false,\"name\":\"question_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"reduce_k_below_max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"reduce_k_below_max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"sources_answer_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"sources\",\"password\":false,\"name\":\"sources_answer_key\",\"advanced\":
false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"RetrievalQAWithSourcesChain\"},\"description\":\"Question-answering with sources over an index.\",\"base_classes\":[\"Chain\",\"BaseQAWithSourcesChain\",\"RetrievalQAWithSourcesChain\",\"function\"],\"display_name\":\"RetrievalQAWithSourcesChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SQLDatabaseChain\":{\"template\":{\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"_type\":\"SQLDatabaseChain\"},\"description\":\"\",\"base_classes\":[\"Chain\",\"SQLDatabaseChain\",\"function\"],\"display_name\":\"SQLDatabaseChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"CombineDocsChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":
\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"chain_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"load_qa_chain\"},\"description\":\"Load question answering chain.\",\"base_classes\":[\"BaseCombineDocumentsChain\",\"function\"],\"display_name\":\"CombineDocsChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SeriesCharacterChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"character\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"character\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"series\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"series\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SeriesCharacterChain\"},\"description\":\"SeriesCharacterChain is a chain you can use to have a conversation with a character from a 
series.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"Chain\",\"ConversationChain\",\"SeriesCharacterChain\",\"function\"],\"display_name\":\"SeriesCharacterChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"MidJourneyPromptChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"_type\":\"MidJourneyPromptChain\"},\"description\":\"MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"Chain\",\"ConversationChain\",\"MidJourneyPromptChain\"],\"display_name\":\"MidJourneyPromptChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"TimeTravelGuideChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"_type\":\"TimeTravelGuideChain\"},\"description\":\"Time travel guide 
chain.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"TimeTravelGuideChain\",\"Chain\",\"ConversationChain\"],\"display_name\":\"TimeTravelGuideChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PromptRunner\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.prompts import PromptTemplate\\nfrom langchain.schema import Document\\n\\n\\nclass PromptRunner(CustomComponent):\\n display_name: str = \\\"Prompt Runner\\\"\\n description: str = \\\"Run a Chain with the given PromptTemplate\\\"\\n beta = True\\n field_config = {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt Template\\\",\\n \\\"info\\\": \\\"Make sure the prompt has all variables filled.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {}\\n ) -> Document:\\n chain = prompt | llm\\n # The input is an empty dict because the prompt is already filled\\n result = chain.invoke(input=inputs)\\n if hasattr(result, \\\"content\\\"):\\n result = result.content\\n self.repr_value = result\\n return 
Document(page_content=str(result))\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"inputs\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"inputs\",\"display_name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt Template\",\"advanced\":false,\"dynamic\":false,\"info\":\"Make sure the prompt has all variables filled.\",\"type\":\"PromptTemplate\",\"list\":false}},\"description\":\"Run a Chain with the given PromptTemplate\",\"base_classes\":[\"Document\"],\"display_name\":\"Prompt 
Runner\",\"custom_fields\":{\"inputs\":null,\"llm\":null,\"prompt\":null},\"output_types\":[\"PromptRunner\"],\"documentation\":\"\",\"beta\":true,\"error\":null}},\"agents\":{\"ZeroShotAgent\":{\"template\":{\"callback_manager\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callback_manager\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCallbackManager\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"AgentOutputParser\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseTool\",\"list\":true},\"format_instructions\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"Use the following format:\\n\\nQuestion: the input question you must answer\\nThought: you should always think about what to do\\nAction: the action to take, should be one of [{tool_names}]\\nAction Input: the input to the action\\nObservation: the result of the action\\n... 
(this Thought/Action/Action Input/Observation can repeat N times)\\nThought: I now know the final answer\\nFinal Answer: the final answer to the original input question\",\"password\":false,\"name\":\"format_instructions\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"Answer the following questions as best you can. You have access to the following tools:\",\"password\":false,\"name\":\"prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"suffix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"Begin!\\n\\nQuestion: {input}\\nThought:{agent_scratchpad}\",\"password\":false,\"name\":\"suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ZeroShotAgent\"},\"description\":\"Construct an agent from an LLM and 
tools.\",\"base_classes\":[\"Agent\",\"BaseSingleActionAgent\",\"ZeroShotAgent\",\"function\"],\"display_name\":\"ZeroShotAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent\",\"beta\":false,\"error\":null},\"JsonAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"toolkit\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseToolkit\",\"list\":false},\"_type\":\"json_agent\"},\"description\":\"Construct a json agent from an LLM and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"JsonAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/openapi\",\"beta\":false,\"error\":null},\"CSVAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".csv\"],\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null},\"AgentInitializer\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true},\"agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"openai-functions\",\"openai-multi-functions\"],\"name\":\"agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"initialize_agent\"},\"description\":\"Construct a zero shot agent from an LLM and 
tools.\",\"base_classes\":[\"AgentExecutor\",\"function\"],\"display_name\":\"AgentInitializer\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"beta\":false,\"error\":null},\"VectorStoreAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstoreinfo\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstoreinfo\",\"display_name\":\"Vector Store Info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreInfo\",\"list\":false},\"_type\":\"vectorstore_agent\"},\"description\":\"Construct an agent from a Vector Store.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"VectorStoreAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreRouterAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstoreroutertoolkit\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstoreroutertoolkit\",\"display_name\":\"Vector Store Router Toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreRouterToolkit\",\"list\":false},\"_type\":\"vectorstorerouter_agent\"},\"description\":\"Construct an agent from a Vector Store 
Router.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"VectorStoreRouterAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SQLAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"database_uri\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"database_uri\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"sql_agent\"},\"description\":\"Construct an SQL agent from an LLM and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"SQLAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"OpenAIConversationalAgent\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Optional\\nfrom langchain.prompts import SystemMessagePromptTemplate\\nfrom langchain.tools import Tool\\nfrom langchain.schema.memory import BaseMemory\\nfrom langchain.chat_models import ChatOpenAI\\n\\nfrom langchain.agents.agent import AgentExecutor\\nfrom langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent\\nfrom langchain.memory.token_buffer import ConversationTokenBufferMemory\\nfrom langchain.prompts.chat import MessagesPlaceholder\\nfrom langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import (\\n _get_default_system_message,\\n)\\n\\n\\nclass ConversationalAgent(CustomComponent):\\n display_name: str = \\\"OpenAI Conversational Agent\\\"\\n description: str = \\\"Conversational Agent that can use OpenAI's function calling API\\\"\\n\\n def build_config(self):\\n 
openai_function_models = [\\n \\\"gpt-3.5-turbo-0613\\\",\\n \\\"gpt-3.5-turbo-16k-0613\\\",\\n \\\"gpt-4-0613\\\",\\n \\\"gpt-4-32k-0613\\\",\\n ]\\n return {\\n \\\"tools\\\": {\\\"is_list\\\": True, \\\"display_name\\\": \\\"Tools\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"system_message\\\": {\\\"display_name\\\": \\\"System Message\\\"},\\n \\\"max_token_limit\\\": {\\\"display_name\\\": \\\"Max Token Limit\\\"},\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": openai_function_models,\\n \\\"value\\\": openai_function_models[0],\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_name: str,\\n openai_api_key: str,\\n tools: Tool,\\n openai_api_base: Optional[str] = None,\\n memory: Optional[BaseMemory] = None,\\n system_message: Optional[SystemMessagePromptTemplate] = None,\\n max_token_limit: int = 2000,\\n ) -> AgentExecutor:\\n llm = ChatOpenAI(\\n model=model_name,\\n openai_api_key=openai_api_key,\\n openai_api_base=openai_api_base,\\n )\\n if not memory:\\n memory_key = \\\"chat_history\\\"\\n memory = ConversationTokenBufferMemory(\\n memory_key=memory_key,\\n return_messages=True,\\n output_key=\\\"output\\\",\\n llm=llm,\\n max_token_limit=max_token_limit,\\n )\\n else:\\n memory_key = memory.memory_key # type: ignore\\n\\n _system_message = system_message or _get_default_system_message()\\n prompt = OpenAIFunctionsAgent.create_prompt(\\n system_message=_system_message, # type: ignore\\n extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],\\n )\\n agent = OpenAIFunctionsAgent(\\n llm=llm, tools=tools, prompt=prompt # type: ignore\\n )\\n return AgentExecutor(\\n agent=agent,\\n tools=tools, # type: ignore\\n memory=memory,\\n verbose=True,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n 
)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"max_token_limit\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":2000,\"password\":false,\"name\":\"max_token_limit\",\"display_name\":\"Max Token Limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"model_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo-0613\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo-16k-0613\",\"gpt-4-0613\",\"gpt-4-32k-0613\"],\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"openai_api_base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_key\",\"display_name\":\"openai_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"system_message\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"system_message\",\"display_name\":\"System 
Message\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SystemMessagePromptTemplate\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true}},\"description\":\"Conversational Agent that can use OpenAI's function calling API\",\"base_classes\":[\"Chain\",\"AgentExecutor\"],\"display_name\":\"OpenAI Conversational Agent\",\"custom_fields\":{\"max_token_limit\":null,\"memory\":null,\"model_name\":null,\"openai_api_base\":null,\"openai_api_key\":null,\"system_message\":null,\"tools\":null},\"output_types\":[\"OpenAIConversationalAgent\"],\"documentation\":\"\",\"beta\":true,\"error\":null}},\"prompts\":{\"ChatMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"role\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"role\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ChatMessagePromptTemplate\"},\"description\":\"Chat message prompt 
template.\",\"base_classes\":[\"BaseStringMessagePromptTemplate\",\"ChatMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"ChatMessagePromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates\",\"beta\":false,\"error\":null},\"ChatPromptTemplate\":{\"template\":{\"messages\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"messages\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseMessagePromptTemplate\",\"list\":true},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"ChatPromptTemplate\"},\"description\":\"A prompt template for chat 
models.\",\"base_classes\":[\"BaseChatPromptTemplate\",\"BasePromptTemplate\",\"ChatPromptTemplate\"],\"display_name\":\"ChatPromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null},\"HumanMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"_type\":\"HumanMessagePromptTemplate\"},\"description\":\"Human message prompt template. 
This is a message sent from the user.\",\"base_classes\":[\"BaseStringMessagePromptTemplate\",\"HumanMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"HumanMessagePromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null},\"PromptTemplate\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_types\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_types\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":true},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\
"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\"},\"description\":\"A prompt template for a language model.\",\"base_classes\":[\"StringPromptTemplate\",\"BasePromptTemplate\",\"PromptTemplate\"],\"display_name\":\"PromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"beta\":false,\"error\":null},\"SystemMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"_type\":\"SystemMessagePromptTemplate\"},\"description\":\"System message prompt 
template.\",\"base_classes\":[\"BaseStringMessagePromptTemplate\",\"BaseMessagePromptTemplate\",\"SystemMessagePromptTemplate\"],\"display_name\":\"SystemMessagePromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null}},\"llms\":{\"Anthropic\":{\"template\":{\"anthropic_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"anthropic_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SecretStr\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"count_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"count_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callable[[str], 
int]\",\"list\":false},\"AI_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"AI_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"HUMAN_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"HUMAN_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"anthropic_api_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"anthropic_api_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"max_tokens_to_sample\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":256,\"password\":false,\"name\":\"max_tokens_to_sample\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"
info\":\"\",\"type\":\"dict\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"claude-2\",\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"Anthropic\"},\"description\":\"Anthropic large language 
models.\",\"base_classes\":[\"_AnthropicCommon\",\"BaseLanguageModel\",\"Anthropic\",\"BaseLLM\",\"LLM\"],\"display_name\":\"Anthropic\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Cohere\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cohere_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_retries\",\"advanced\"
:false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.75,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dyn
amic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"truncate\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"truncate\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"Cohere\"},\"description\":\"Cohere large language models.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\",\"LLM\",\"BaseCohere\",\"Cohere\"],\"display_name\":\"Cohere\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\",\"beta\":false,\"error\":null},\"CTransformers\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"config\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{\\n \\\"top_k\\\": 40,\\n \\\"top_p\\\": 0.95,\\n \\\"temperature\\\": 0.8,\\n \\\"repetition_penalty\\\": 1.1,\\n \\\"last_n_tokens\\\": 64,\\n \\\"seed\\\": -1,\\n \\\"max_new_tokens\\\": 256,\\n \\\"stop\\\": null,\\n \\\"stream\\\": false,\\n \\\"reset\\\": true,\\n \\\"batch_size\\\": 8,\\n \\\"threads\\\": -1,\\n 
\\\"context_length\\\": -1,\\n \\\"gpu_layers\\\": 0\\n}\",\"password\":false,\"name\":\"config\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"lib\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"lib\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_file\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"CTransformers\"},\"description\":\"C Transformers LLM 
models.\",\"base_classes\":[\"CTransformers\",\"BaseLanguageModel\",\"BaseLLM\",\"LLM\"],\"display_name\":\"CTransformers\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\",\"beta\":false,\"error\":null},\"HuggingFaceHub\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"huggingfacehub_api_token\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"HuggingFace Hub API 
Token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"repo_id\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt2\",\"password\":false,\"name\":\"repo_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"task\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-generation\",\"password\":false,\"options\":[\"text-generation\",\"text2text-generation\",\"summarization\"],\"name\":\"task\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"HuggingFaceHub\"},\"description\":\"HuggingFaceHub 
models.\",\"base_classes\":[\"HuggingFaceHub\",\"BaseLanguageModel\",\"BaseLLM\",\"LLM\"],\"display_name\":\"HuggingFaceHub\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub\",\"beta\":false,\"error\":null},\"LlamaCpp\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"grammar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"grammar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef('str\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"echo\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"echo\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"f16_kv\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"f16_kv\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"grammar_path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"grammar_path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"last_n_tokens_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"valu
e\":64,\"password\":false,\"name\":\"last_n_tokens_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"logits_all\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"logits_all\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"logprobs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"logprobs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"lora_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"lora_base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"lora_path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"lora_path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"n_batch\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8,\"passwo
rd\":false,\"name\":\"n_batch\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_ctx\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":512,\"password\":false,\"name\":\"n_ctx\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_gpu_layers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"n_gpu_layers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_parts\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":-1,\"password\":false,\"name\":\"n_parts\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_threads\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"n_threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"repeat_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1.1,\"password\":false,\"name\":\"repeat_penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"rope_freq_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10000.0,\"password\":false,\"name\":\"rope_freq_base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"rope_freq_scale\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1.0,\"password\":false,\"name\":\"rope_freq_scale\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"seed\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":-1,\"password\":false,\"name\":\"seed\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\
"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"suffix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"suffix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.8,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"use_mlock\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_mlock\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"use_mmap\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"use_mmap\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false
,\"value\":true,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"vocab_only\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"vocab_only\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"LlamaCpp\"},\"description\":\"llama.cpp model.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\",\"LLM\",\"LlamaCpp\"],\"display_name\":\"LlamaCpp\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\",\"beta\":false,\"error\":null},\"OpenAI\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":20,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"best_of\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"cache\":{\"required\":false,\"plac
eholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"logit_bias\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-davinci-003\",\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\
"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language 
models.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\",\"BaseOpenAI\",\"OpenAI\"],\"display_name\":\"OpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"beta\":false,\"error\":null},\"VertexAI\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"list\":false},\"credentials\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"location\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_output_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":128,\"password\":false,\"name\":\"max_output_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false
,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-bison\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"project\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_parallelism\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"request_parallelism\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynam
ic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tuned_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tuned_model_name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"VertexAI\"},\"description\":\"Google Vertex AI large language models.\",\"base_classes\":[\"_VertexAICommon\",\"VertexAI\",\"BaseLanguageModel\",\"BaseLLM\",\"_VertexAIBase\"],\"display_name\":\"VertexAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/google_vertex_ai_palm\",\"beta\":false,\"error\":null},\"ChatAnthropic\":{\"template\":{\"anthropic_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"anthropic_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SecretStr\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"count_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"count_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callable[[str], 
int]\",\"list\":false},\"AI_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"AI_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"HUMAN_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"HUMAN_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"anthropic_api_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"anthropic_api_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"max_tokens_to_sample\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":256,\"password\":false,\"name\":\"max_tokens_to_sample\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"
info\":\"\",\"type\":\"dict\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"claude-2\",\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatAnthropic\"},\"description\":\"`Anthropic` chat large language 
models.\",\"base_classes\":[\"_AnthropicCommon\",\"BaseLanguageModel\",\"BaseChatModel\",\"ChatAnthropic\",\"BaseLLM\"],\"display_name\":\"ChatAnthropic\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic\",\"beta\":false,\"error\":null},\"ChatOpenAI\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"mult
iline\":false,\"value\":\"gpt-3.5-turbo-0613\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k-0613\",\"gpt-3.5-turbo-16k\",\"gpt-4-0613\",\"gpt-4-32k-0613\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseChatModel\",\"ChatOpenAI\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"ChatVertexAI\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"list\":false},\"credentials\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"location\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_output_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":128,\"password\":false,\"name\":\"max_output_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dy
namic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat-bison\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"project\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_parallelism\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"request_parallelism\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advance
d\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatVertexAI\"},\"description\":\"`Vertex AI` Chat large language models API.\",\"base_classes\":[\"_VertexAICommon\",\"BaseChatModel\",\"BaseLanguageModel\",\"_VertexAIBase\",\"ChatVertexAI\",\"BaseLLM\"],\"display_name\":\"ChatVertexAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm\",\"beta\":false,\"error\":null},\"HuggingFaceEndpoints\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.llms.huggingface_endpoint import HuggingFaceEndpoint\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass HuggingFaceEndpointsComponent(CustomComponent):\\n display_name: str = \\\"Hugging Face Inference API\\\"\\n description: str = \\\"LLM model from Hugging Face Inference API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Endpoint URL\\\", \\\"password\\\": True},\\n \\\"task\\\": {\\n \\\"display_name\\\": \\\"Task\\\",\\n \\\"options\\\": [\\\"text2text-generation\\\", \\\"text-generation\\\", \\\"summarization\\\"],\\n },\\n \\\"huggingfacehub_api_token\\\": {\\\"display_name\\\": \\\"API token\\\", \\\"password\\\": True},\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Keyword Arguments\\\",\\n 
\\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n endpoint_url: str,\\n task: str = \\\"text2text-generation\\\",\\n huggingfacehub_api_token: Optional[str] = None,\\n model_kwargs: Optional[dict] = None,\\n ) -> BaseLLM:\\n try:\\n output = HuggingFaceEndpoint(\\n endpoint_url=endpoint_url,\\n task=task,\\n huggingfacehub_api_token=huggingfacehub_api_token,\\n model_kwargs=model_kwargs,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to HuggingFace Endpoints API.\\\") from e\\n return output\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"endpoint_url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"endpoint_url\",\"display_name\":\"Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"huggingfacehub_api_token\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"API token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Keyword Arguments\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"task\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text2text-generation\",\"password\":false,\"options\":[\"text2text-generation\",\"text-generation\",\"summarization\"],\"name\":\"task\",\"display_name\":\"Task\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true}},\"description\":\"LLM model from Hugging Face Inference API.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"Hugging Face 
Inference API\",\"custom_fields\":{\"endpoint_url\":null,\"huggingfacehub_api_token\":null,\"model_kwargs\":null,\"task\":null},\"output_types\":[\"HuggingFaceEndpoints\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"BaiduQianfanChatEndpoints\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass QianfanChatEndpointComponent(CustomComponent):\\n display_name: str = \\\"QianfanChatEndpoint\\\"\\n description: str = (\\n \\\"Baidu Qianfan chat models. Get more detail from \\\"\\n \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model 
params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n \\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = QianfanChatEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=qianfan_ak,\\n qianfan_sk=qianfan_sk,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n return output # type: ignore\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"endpoint\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model 
used.\",\"type\":\"str\",\"list\":false},\"model\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"type\":\"str\",\"list\":true},\"penalty_score\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1.0,\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"type\":\"float\",\"list\":false},\"qianfan_ak\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"type\":\"str\",\"list\":false},\"qianfan_sk\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"type\":\"str\",\"list\":false},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and 
ERNIE-Bot-turbo\",\"type\":\"float\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.8,\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"type\":\"float\",\"list\":false}},\"description\":\"Baidu Qianfan chat models. Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"QianfanChatEndpoint\",\"custom_fields\":{\"endpoint\":null,\"model\":null,\"penalty_score\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"temperature\":null,\"top_p\":null},\"output_types\":[\"BaiduQianfanChatEndpoints\"],\"documentation\":\"\",\"beta\":true,\"error\":null},\"BaiduQianfanLLMEndpoints\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass QianfanLLMEndpointComponent(CustomComponent):\\n display_name: str = \\\"QianfanLLMEndpoint\\\"\\n description: str = (\\n \\\"Baidu Qianfan hosted open source or customized models. 
\\\"\\n \\\"Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n \\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": 
False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = QianfanLLMEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=qianfan_ak,\\n qianfan_sk=qianfan_sk,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n return output # type: ignore\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"endpoint\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model used.\",\"type\":\"str\",\"list\":false},\"model\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"type\":\"str\",\"list\":true},\"penalty_score\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1.0,\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and 
ERNIE-Bot-turbo\",\"type\":\"float\",\"list\":false},\"qianfan_ak\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"type\":\"str\",\"list\":false},\"qianfan_sk\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"type\":\"str\",\"list\":false},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"type\":\"float\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.8,\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"type\":\"float\",\"list\":false}},\"description\":\"Baidu Qianfan hosted open source or customized models. 
Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"QianfanLLMEndpoint\",\"custom_fields\":{\"endpoint\":null,\"model\":null,\"penalty_score\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"temperature\":null,\"top_p\":null},\"output_types\":[\"BaiduQianfanLLMEndpoints\"],\"documentation\":\"\",\"beta\":true,\"error\":null}},\"memories\":{\"ConversationBufferMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat 
Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationBufferMemory\"},\"description\":\"Buffer for storing conversation memory.\",\"base_classes\":[\"BaseMemory\",\"BaseChatMemory\",\"ConversationBufferMemory\"],\"display_name\":\"ConversationBufferMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer\",\"beta\":false,\"error\":null},\"ConversationBufferWindowMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"display_name\":\"Memory 
Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationBufferWindowMemory\"},\"description\":\"Buffer for storing conversation memory inside a limited size 
window.\",\"base_classes\":[\"BaseMemory\",\"ConversationBufferWindowMemory\",\"BaseChatMemory\"],\"display_name\":\"ConversationBufferWindowMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer_window\",\"beta\":false,\"error\":null},\"ConversationEntityMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"entity_extraction_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"entity_store\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_store\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseEntityStore\",\"list\":false},\"entity_summarization_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_summarization_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chat_history_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"history\",\"password\":false,\"name\":\"chat_history_key\",\"advanced\":false,\"dynam
ic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"entity_cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationEntityMemory\"},\"description\":\"Entity extractor & summarizer memory.\",\"base_classes\":[\"BaseMemory\",\"ConversationEntityMemory\",\"BaseChatMemory\"],\"display_name\":\"ConversationEntityMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/entity_memory_with_sqlite\",\"beta\":false,\"error\":null},\"ConversationKGMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"entity_extraction_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"kg\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"kg\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"NetworkxEntityGraph\",\"list\":false},\"knowledge_extraction_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"knowledge_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"summary_message_cls\":{\"required\":false,\"p
laceholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[langchain.schema.messages.BaseMessage]\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationKGMemory\"},\"description\":\"Knowledge graph conversation memory.\",\"base_classes\":[\"ConversationKGMemory\",\"BaseMemory\",\"BaseChatMemory\"],\"display_name\":\"ConversationKGMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/kg\",\"beta\":false,\"error\":null},\"ConversationSummaryMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"summary_message_cls\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[langchain.schema.messages.BaseMessage]\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"buffer\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\"
:\"\",\"password\":false,\"name\":\"buffer\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationSummaryMemory\"},\"description\":\"Conversation summarizer to chat memory.\",\"base_classes\":[\"SummarizerMixin\",\"ConversationSummaryMemory\",\"BaseChatMemory\",\"BaseMemory\"],\"display_name\":\"ConversationSummaryMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/summary\",\"beta\":false,\"error\":null},\"MongoDBChatMessageHistory\":{\"template\":{\"collection_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"connection_string\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"MongoDB connection string (e.g mongodb://mongo_user:password123@mongo:27017)\",\"type\":\"str\",\"list\":false},\"database_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"database_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"session_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"MongoDBChatMessageHistory\"},\"description\":\"Memory store with 
MongoDB\",\"base_classes\":[\"MongoDBChatMessageHistory\",\"BaseChatMessageHistory\"],\"display_name\":\"MongoDBChatMessageHistory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/mongodb_chat_message_history\",\"beta\":false,\"error\":null},\"MotorheadMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"client_id\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"context\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"context\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be 
used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"session_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"https://api.getmetal.io/v1/motorhead\",\"password\":false,\"name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"MotorheadMemory\"},\"description\":\"Chat message memory backed by Motorhead 
service.\",\"base_classes\":[\"BaseMemory\",\"BaseChatMemory\",\"MotorheadMemory\"],\"display_name\":\"MotorheadMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/integrations/memory/motorhead_memory\",\"beta\":false,\"error\":null},\"PostgresChatMessageHistory\":{\"template\":{\"connection_string\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"postgresql://postgres:mypassword@localhost/chat_history\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"session_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"table_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PostgresChatMessageHistory\"},\"description\":\"Memory store with 
Postgres\",\"base_classes\":[\"PostgresChatMessageHistory\",\"BaseChatMessageHistory\"],\"display_name\":\"PostgresChatMessageHistory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/postgres_chat_message_history\",\"beta\":false,\"error\":null},\"VectorStoreRetrieverMemory\":{\"template\":{\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreRetriever\",\"list\":false},\"exclude_input_keys\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"exclude_input_keys\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_docs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"return_docs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"VectorStoreRetrieverMemory\"},\"description\":\"VectorStoreRetriever-backed 
memory.\",\"base_classes\":[\"BaseMemory\",\"VectorStoreRetrieverMemory\"],\"display_name\":\"VectorStoreRetrieverMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/vectorstore_retriever_memory\",\"beta\":false,\"error\":null}},\"tools\":{\"Calculator\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"_type\":\"Calculator\"},\"description\":\"Useful for when you need to answer questions about math.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Calculator\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"News API\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"news_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"news_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"News API\"},\"description\":\"Use this when you want to get information about the top headlines of current news stories. 
The input should be a question in natural language that this API can answer.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"News API\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"TMDB API\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"tmdb_bearer_token\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"tmdb_bearer_token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"TMDB API\"},\"description\":\"Useful for when you want to get information from The Movie Database. The input should be a question in natural language that this API can answer.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"TMDB API\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Podcast API\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"listen_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"listen_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Podcast API\"},\"description\":\"Use the Listen Notes Podcast API to search all podcasts or episodes. 
The input should be a question in natural language that this API can answer.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Podcast API\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Search\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"serpapi_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Search\"},\"description\":\"A search engine. Useful for when you need to answer questions about current events. Input should be a search query.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Search\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Tool\":{\"template\":{\"func\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"func\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"function\",\"list\":false},\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_direct\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":
\"Tool\"},\"description\":\"Converts a chain, agent or function into a tool.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Tool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonFunctionTool\":{\"template\":{\"code\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_direct\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PythonFunctionTool\"},\"description\":\"Python function to be executed.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"PythonFunctionTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonFunction\":{\"template\":{\"code\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return 
text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"PythonFunction\"},\"description\":\"Python function to be executed.\",\"base_classes\":[\"function\"],\"display_name\":\"PythonFunction\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"JsonSpec\":{\"template\":{\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\",\".yaml\",\".yml\"],\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\",\"yaml\",\"yml\"],\"file_path\":null},\"max_value_length\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"max_value_length\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"_type\":\"JsonSpec\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonSpec\"],\"display_name\":\"JsonSpec\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"BingSearchRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BingSearchAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required
\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"BingSearchRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BingSearchRun\",\"BaseTool\"],\"display_name\":\"BingSearchRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSearchResults\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"GoogleSearchAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":f
alse,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"num_results\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":4,\"password\":false,\"name\":\"num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GoogleSearchResults\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"GoogleSearchResults\",\"BaseTool\"],\"display_name\":\"GoogleSearchResults\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSearchRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"GoogleSearchAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanc
ed\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GoogleSearchRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"GoogleSearchRun\",\"BaseTool\"],\"display_name\":\"GoogleSearchRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSerperRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"GoogleSerperAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Goo
gleSerperRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"GoogleSerperRun\",\"BaseTool\"],\"display_name\":\"GoogleSerperRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"InfoSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"InfoSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"InfoSQLDatabaseTool\",\"BaseTool\"],\"display_name\":\"InfoSQLDatabaseTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"JsonGetValueTool\":{\"template\":{\"args_
schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"spec\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"JsonSpec\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"JsonGetValueTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonGetValueTool\",\"BaseTool\"],\"display_name\":\"JsonGetValueTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"JsonListKeysTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\
":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"spec\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"JsonSpec\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"JsonListKeysTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonListKeysTool\",\"BaseTool\"],\"display_name\":\"JsonListKeysTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"ListSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynami
c\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ListSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"BaseTool\",\"ListSQLDatabaseTool\"],\"display_name\":\"ListSQLDatabaseTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonAstREPLTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"globals\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"globals\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"t
ype\":\"bool\",\"list\":false},\"locals\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"locals\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"sanitize_input\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"sanitize_input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PythonAstREPLTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"PythonAstREPLTool\",\"BaseTool\"],\"display_name\":\"PythonAstREPLTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonREPLTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"python_repl\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"python_repl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PythonREPL\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"sh
ow\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"sanitize_input\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"sanitize_input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PythonREPLTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"PythonREPLTool\",\"BaseTool\"],\"display_name\":\"PythonREPLTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"QuerySQLDataBaseTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handl
e_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"QuerySQLDataBaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"QuerySQLDataBaseTool\",\"BaseTool\"],\"display_name\":\"QuerySQLDataBaseTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsDeleteTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false
,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsDeleteTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsDeleteTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsDeleteTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsGetTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"l
ist\":false},\"_type\":\"RequestsGetTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsGetTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsGetTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsPatchTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsPatchTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseRequestsTool\",\"BaseTool\",\"RequestsPatchTool\"],\"display_name\":\"RequestsPatchTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",
\"beta\":false,\"error\":null},\"RequestsPostTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsPostTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsPostTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsPostTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsPutTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"T
ype[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsPutTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseRequestsTool\",\"BaseTool\",\"RequestsPutTool\"],\"display_name\":\"RequestsPutTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WikipediaQueryRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"WikipediaAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"l
ist\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"WikipediaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"WikipediaQueryRun\",\"BaseTool\"],\"display_name\":\"WikipediaQueryRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WolframAlphaQueryRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"WolframAlphaAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_err
or\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"WolframAlphaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"WolframAlphaQueryRun\",\"BaseTool\"],\"display_name\":\"WolframAlphaQueryRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null}},\"toolkits\":{\"JsonToolkit\":{\"template\":{\"spec\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"JsonSpec\",\"list\":false},\"_type\":\"JsonToolkit\"},\"description\":\"Toolkit for interacting with a JSON 
spec.\",\"base_classes\":[\"BaseToolkit\",\"JsonToolkit\",\"Tool\"],\"display_name\":\"JsonToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"OpenAPIToolkit\":{\"template\":{\"json_agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"json_agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"AgentExecutor\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"_type\":\"OpenAPIToolkit\"},\"description\":\"Toolkit for interacting with an OpenAPI API.\",\"base_classes\":[\"BaseToolkit\",\"OpenAPIToolkit\",\"Tool\"],\"display_name\":\"OpenAPIToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreInfo\":{\"template\":{\"vectorstore\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstore\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStore\",\"list\":false},\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"VectorStoreInfo\"},\"description\":\"Information about a 
VectorStore.\",\"base_classes\":[\"VectorStoreInfo\"],\"display_name\":\"VectorStoreInfo\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreRouterToolkit\":{\"template\":{\"llm\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstores\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstores\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreInfo\",\"list\":true},\"_type\":\"VectorStoreRouterToolkit\"},\"description\":\"Toolkit for routing between Vector Stores.\",\"base_classes\":[\"BaseToolkit\",\"VectorStoreRouterToolkit\",\"Tool\"],\"display_name\":\"VectorStoreRouterToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreToolkit\":{\"template\":{\"llm\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstore_info\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstore_info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreInfo\",\"list\":false},\"_type\":\"VectorStoreToolkit\"},\"description\":\"Toolkit for interacting with a Vector Store.\",\"base_classes\":[\"BaseToolkit\",\"VectorStoreToolkit\",\"Tool\"],\"display_name\":\"VectorStoreToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Metaphor\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Union\\nfrom langflow import 
CustomComponent\\n\\nfrom metaphor_python import Metaphor # type: ignore\\nfrom langchain.tools import Tool\\nfrom langchain.agents import tool\\nfrom langchain.agents.agent_toolkits.base import BaseToolkit\\n\\n\\nclass MetaphorToolkit(CustomComponent):\\n display_name: str = \\\"Metaphor\\\"\\n description: str = \\\"Metaphor Toolkit\\\"\\n documentation = (\\n \\\"https://python.langchain.com/docs/integrations/tools/metaphor_search\\\"\\n )\\n beta = True\\n # api key should be password = True\\n field_config = {\\n \\\"metaphor_api_key\\\": {\\\"display_name\\\": \\\"Metaphor API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n metaphor_api_key: str,\\n use_autoprompt: bool = True,\\n search_num_results: int = 5,\\n similar_num_results: int = 5,\\n ) -> Union[Tool, BaseToolkit]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n client = Metaphor(api_key=metaphor_api_key)\\n\\n @tool\\n def search(query: str):\\n \\\"\\\"\\\"Call search engine with a query.\\\"\\\"\\\"\\n return client.search(\\n query, use_autoprompt=use_autoprompt, num_results=search_num_results\\n )\\n\\n @tool\\n def get_contents(ids: List[str]):\\n \\\"\\\"\\\"Get contents of a webpage.\\n\\n The ids passed in should be a list of ids as fetched from `search`.\\n \\\"\\\"\\\"\\n return client.get_contents(ids)\\n\\n @tool\\n def find_similar(url: str):\\n \\\"\\\"\\\"Get search results similar to a given URL.\\n\\n The url passed in should be a URL returned from `search`\\n \\\"\\\"\\\"\\n return client.find_similar(url, num_results=similar_num_results)\\n\\n return [search, get_contents, find_similar] # type: 
ignore\\n\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"metaphor_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"metaphor_api_key\",\"display_name\":\"Metaphor API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"search_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"similar_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"similar_num_results\",\"display_name\":\"similar_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"use_autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Metaphor 
Toolkit\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Metaphor\",\"custom_fields\":{\"metaphor_api_key\":null,\"search_num_results\":null,\"similar_num_results\":null,\"use_autoprompt\":null},\"output_types\":[\"Metaphor\"],\"documentation\":\"https://python.langchain.com/docs/integrations/tools/metaphor_search\",\"beta\":true,\"error\":null}},\"wrappers\":{\"TextRequestsWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ClientSession\",\"list\":false},\"auth\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"auth\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"TextRequestsWrapper\"},\"description\":\"Lightweight wrapper around requests library.\",\"base_classes\":[\"TextRequestsWrapper\"],\"display_name\":\"TextRequestsWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SQLDatabase\":{\"template\":{\"database_uri\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"database_uri\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"engine_args\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"engine_args\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"SQLDatabase\"},\"description\":\"Construct a SQLAlchemy engine from 
URI.\",\"base_classes\":[\"SQLDatabase\",\"function\"],\"display_name\":\"SQLDatabase\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null}},\"embeddings\":{\"OpenAIEmbeddings\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer 
\\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API 
Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"skip_empty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"skip_empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"CohereEmbeddings\":{\"template\":{\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cohere_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"embed-english-v2.0\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"truncate\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"truncate\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CohereEmbeddings\"},\"description\":\"Cohere embedding 
models.\",\"base_classes\":[\"CohereEmbeddings\",\"Embeddings\"],\"display_name\":\"CohereEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere\",\"beta\":false,\"error\":null},\"HuggingFaceEmbeddings\":{\"template\":{\"cache_folder\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"cache_folder\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"encode_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"encode_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"sentence-transformers/all-mpnet-base-v2\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"multi_process\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"multi_process\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"HuggingFaceEmbeddings\"},\"description\":\"HuggingFace sentence_transformers embedding 
models.\",\"base_classes\":[\"HuggingFaceEmbeddings\",\"Embeddings\"],\"display_name\":\"HuggingFaceEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers\",\"beta\":false,\"error\":null},\"VertexAIEmbeddings\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"list\":false},\"credentials\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"location\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"password\":false,\"name\":\"location\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_output_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":128,\"password\":true,\"name\":\"max_output_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"textembedding-gecko\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"project\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"project\",\"advanced\"
:true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_parallelism\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"request_parallelism\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"_type\":\"VertexAIEmbeddings\"},\"description\":\"Google Cloud VertexAI embedding 
models.\",\"base_classes\":[\"_VertexAICommon\",\"VertexAIEmbeddings\",\"Embeddings\",\"_VertexAIBase\"],\"display_name\":\"VertexAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/google_vertex_ai_palm\",\"beta\":false,\"error\":null}},\"vectorstores\":{\"Chroma\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.Client\",\"list\":false},\"client_settings\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client_settings\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.config.Setting\",\"list\":true},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"chroma_server_cors_allow_origins\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Chroma Server CORS Allow Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"chroma_server_grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Chroma Server GRPC 
Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Chroma Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_http_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_http_port\",\"display_name\":\"Chroma Server HTTP Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_ssl_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Chroma Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"collection_metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"collection_metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"persist\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"p
ersist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"persist_directory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"persist_directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"_type\":\"Chroma\"},\"description\":\"Create a Chroma vectorstore from a raw documents.\",\"base_classes\":[\"VectorStore\",\"Chroma\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Chroma\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma\",\"beta\":false,\"error\":null},\"FAISS\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"folder_path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"folder_path\",\"display_name\":\"Local 
Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"_type\":\"FAISS\"},\"description\":\"Construct FAISS wrapper from raw 
documents.\",\"base_classes\":[\"FAISS\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"FAISS\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss\",\"beta\":false,\"error\":null},\"MongoDBAtlasVectorSearch\":{\"template\":{\"collection\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"collection\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Collection[MongoDBDocumentType]\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"db_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"db_name\",\"display_name\":\"Database Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"mongodb_atlas_cluster_uri\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"mongodb_atlas_cluster_uri\",\"display_name\":\"MongoDB Atlas Cluster URI\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"_type\":\"MongoDBAtlasVectorSearch\"},\"description\":\"Construct a `MongoDB Atlas Vector Search` vector store from raw documents.\",\"base_classes\":[\"MongoDBAtlasVectorSearch\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"MongoDB 
Atlas\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas\",\"beta\":false,\"error\":null},\"Pinecone\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":32,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"index_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"namespace\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"namespace\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"pinecone_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"pinecone_api_key\",\"advanced\":
true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"pinecone_env\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"pinecone_env\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"pool_threads\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":4,\"password\":false,\"name\":\"pool_threads\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"text_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"text\",\"password\":true,\"name\":\"text_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"upsert_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"upsert_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"Pinecone\"},\"description\":\"Construct Pinecone wrapper from raw 
documents.\",\"base_classes\":[\"Pinecone\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Pinecone\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/pinecone\",\"beta\":false,\"error\":null},\"Qdrant\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"hnsw_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"hnsw_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.HnswConfigDiff\",\"list\":false},\"init_from\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"init_from\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.InitFrom\",\"list\":false},\"optimizers_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"optimizers_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.OptimizersConfigDiff\",\"list\":false},\"quantization_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"quantization_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.QuantizationConfig\",\"list\":false},\"wal_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wal_config\",\"advanced\":
false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.WalConfigDiff\",\"list\":false},\"api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":64,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"content_payload_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"page_content\",\"password\":false,\"name\":\"content_payload_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"distance_func\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"Cosine\",\"password\":false,\"name\":\"distance_func\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"force_recreate\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"force_recreate\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6334,\"password\":false,\"name\":\"grpc_port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"https\":{\"required\":false
,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"https\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"location\":{\"required\":false,\"placeholder\":\":memory:\",\"show\":true,\"multiline\":false,\"value\":\":memory:\",\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata_payload_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"metadata\",\"password\":false,\"name\":\"metadata_payload_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"on_disk\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"on_disk\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"on_disk_payload\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"on_disk_payload\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6333,\"password\":false,\"name\":\"port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"prefer_grpc\":{\"required\":false
,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"prefer_grpc\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"prefix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"replication_factor\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"replication_factor\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"shard_number\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"shard_number\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"vector_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"vector_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"write_consistency_factor\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"write_consistency_factor\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"_type\":\
"Qdrant\"},\"description\":\"Construct Qdrant wrapper from a list of texts.\",\"base_classes\":[\"VectorStore\",\"Qdrant\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Qdrant\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant\",\"beta\":false,\"error\":null},\"SupabaseVectorStore\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"supabase.client.Client\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"query_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"query_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\
":\"\",\"type\":\"NestedDict\",\"list\":false},\"supabase_service_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"supabase_service_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"supabase_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"supabase_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"table_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SupabaseVectorStore\"},\"description\":\"Return VectorStore initialized from texts and embeddings.\",\"base_classes\":[\"SupabaseVectorStore\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Supabase\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase\",\"beta\":false,\"error\":null},\"Weaviate\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"weaviate.Client\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"relevance_score_fn\":{\"required\":false,\"place
holder\":\"\",\"show\":false,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"relevance_score_fn\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callable[[float], float]\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"by_text\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"by_text\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"client_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"index_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":true},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"NestedDict\",\"list\":false},\"text_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"text\",\"password\":true,\"name\":\"text_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"weaviate_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"weaviate_api_key\",\"advanced\":false,\"dynamic\":false,\"i
nfo\":\"\",\"type\":\"str\",\"list\":false},\"weaviate_url\":{\"required\":true,\"placeholder\":\"http://localhost:8080\",\"show\":true,\"multiline\":false,\"value\":\"http://localhost:8080\",\"password\":false,\"name\":\"weaviate_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Weaviate\"},\"description\":\"Construct Weaviate wrapper from raw documents.\",\"base_classes\":[\"Weaviate\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Weaviate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate\",\"beta\":false,\"error\":null},\"Vectara\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional, Union\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores import Vectara\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.schema import BaseRetriever\\n\\n\\nclass VectaraComponent(CustomComponent):\\n display_name: str = \\\"Vectara\\\"\\n description: str = \\\"Implementation of Vector Store using Vectara\\\"\\n documentation = (\\n \\\"https://python.langchain.com/docs/integrations/vectorstores/vectara\\\"\\n )\\n beta = True\\n # api key should be password = True\\n field_config = {\\n \\\"vectara_customer_id\\\": {\\\"display_name\\\": \\\"Vectara Customer ID\\\"},\\n \\\"vectara_corpus_id\\\": {\\\"display_name\\\": \\\"Vectara Corpus ID\\\"},\\n \\\"vectara_api_key\\\": {\\\"display_name\\\": \\\"Vectara API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n }\\n\\n def build(\\n self,\\n vectara_customer_id: str,\\n vectara_corpus_id: str,\\n vectara_api_key: str,\\n documents: Optional[Document] = None,\\n ) -> 
Union[VectorStore, BaseRetriever]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n if documents is not None:\\n return Vectara.from_documents(\\n documents=documents, # type: ignore\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n source=\\\"langflow\\\",\\n )\\n\\n return Vectara(\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n source=\\\"langflow\\\",\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"vectara_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"vectara_api_key\",\"display_name\":\"Vectara API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"vectara_corpus_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectara_corpus_id\",\"display_name\":\"Vectara Corpus ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"vectara_customer_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectara_customer_id\",\"display_name\":\"Vectara Customer ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Implementation of Vector Store using 
Vectara\",\"base_classes\":[\"VectorStore\",\"BaseRetriever\"],\"display_name\":\"Vectara\",\"custom_fields\":{\"documents\":null,\"vectara_api_key\":null,\"vectara_corpus_id\":null,\"vectara_customer_id\":null},\"output_types\":[\"Vectara\"],\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/vectara\",\"beta\":true,\"error\":null},\"Chroma (1)\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional, Union\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores import Chroma\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.schema import BaseRetriever\\nfrom langchain.embeddings.base import Embeddings\\nimport chromadb # type: ignore\\n\\n\\nclass ChromaComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Chroma.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Chroma (Custom Component)\\\"\\n description: str = \\\"Implementation of Vector Store using Chroma\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/chroma\\\"\\n beta = True\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\", \\\"value\\\": \\\"langflow\\\"},\\n \\\"persist\\\": {\\\"display_name\\\": \\\"Persist\\\"},\\n \\\"persist_directory\\\": {\\\"display_name\\\": \\\"Persist Directory\\\"},\\n \\\"code\\\": {\\\"show\\\": False, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"chroma_server_cors_allow_origins\\\": {\\n 
\\\"display_name\\\": \\\"Server CORS Allow Origins\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_host\\\": {\\\"display_name\\\": \\\"Server Host\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_port\\\": {\\\"display_name\\\": \\\"Server Port\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_grpc_port\\\": {\\n \\\"display_name\\\": \\\"Server gRPC Port\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_ssl_enabled\\\": {\\n \\\"display_name\\\": \\\"Server SSL Enabled\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n collection_name: str,\\n persist: bool,\\n chroma_server_ssl_enabled: bool,\\n persist_directory: Optional[str] = None,\\n embedding: Optional[Embeddings] = None,\\n documents: Optional[Document] = None,\\n chroma_server_cors_allow_origins: Optional[str] = None,\\n chroma_server_host: Optional[str] = None,\\n chroma_server_port: Optional[int] = None,\\n chroma_server_grpc_port: Optional[int] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - collection_name (str): The name of the collection.\\n - persist_directory (Optional[str]): The directory to persist the Vector Store to.\\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\\n - persist (bool): Whether to persist the Vector Store or not.\\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\\n - chroma_server_host (Optional[str]): The host for the Chroma server.\\n - chroma_server_port (Optional[int]): The port for the Chroma server.\\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\\n\\n Returns:\\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\\n 
\\\"\\\"\\\"\\n\\n # Chroma settings\\n chroma_settings = None\\n\\n if chroma_server_host is not None:\\n chroma_settings = chromadb.config.Settings(\\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins\\n or None,\\n chroma_server_host=chroma_server_host,\\n chroma_server_port=chroma_server_port or None,\\n chroma_server_grpc_port=chroma_server_grpc_port or None,\\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\\n )\\n\\n # If documents, then we need to create a Chroma instance using .from_documents\\n if documents is not None and embedding is not None:\\n return Chroma.from_documents(\\n documents=documents, # type: ignore\\n persist_directory=persist_directory if persist else None,\\n collection_name=collection_name,\\n embedding=embedding,\\n client_settings=chroma_settings,\\n )\\n\\n return Chroma(\\n persist_directory=persist_directory, client_settings=chroma_settings\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"chroma_server_cors_allow_origins\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Server CORS Allow Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Server gRPC Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chroma_server_host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Server 
Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_port\",\"display_name\":\"Server Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chroma_server_ssl_enabled\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"collection_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langflow\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"persist\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"persist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"persist_directory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"persist_directory\",\"display_name\":\"Persist 
Directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Implementation of Vector Store using Chroma\",\"base_classes\":[\"VectorStore\",\"BaseRetriever\"],\"display_name\":\"Chroma (Custom Component)\",\"custom_fields\":{\"chroma_server_cors_allow_origins\":null,\"chroma_server_grpc_port\":null,\"chroma_server_host\":null,\"chroma_server_port\":null,\"chroma_server_ssl_enabled\":null,\"collection_name\":null,\"documents\":null,\"embedding\":null,\"persist\":null,\"persist_directory\":null},\"output_types\":[\"Chroma\"],\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/chroma\",\"beta\":true,\"error\":null}},\"documentloaders\":{\"AZLyricsLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"AZLyricsLoader\"},\"description\":\"Load `AZLyrics` 
webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"AZLyricsLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics\",\"beta\":false,\"error\":null},\"AirbyteJSONLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null},\"BSHTMLLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".html\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"html\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"BSHTMLLoader\"},\"description\":\"Load `HTML` files and parse them with `beautiful 
soup`.\",\"base_classes\":[\"Document\"],\"display_name\":\"BSHTMLLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"beta\":false,\"error\":null},\"CSVLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".csv\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null},\"CoNLLULoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".csv\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"csv\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"CoNLLULoader\"},\"description\":\"Load `CoNLL-U` 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"CoNLLULoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u\",\"beta\":false,\"error\":null},\"CollegeConfidentialLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CollegeConfidentialLoader\"},\"description\":\"Load `College Confidential` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"CollegeConfidentialLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential\",\"beta\":false,\"error\":null},\"DirectoryLoader\":{\"template\":{\"glob\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"**/*.txt\",\"password\":false,\"name\":\"glob\",\"display_name\":\"glob\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"load_hidden\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"False\",\"password\":false,\"name\":\"load_hidden\",\"display_name\":\"Load hidden files\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"max_concurrency\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_concurrency\",\"display_name\":\"Max 
concurrency\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"recursive\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"True\",\"password\":false,\"name\":\"recursive\",\"display_name\":\"Recursive\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"silent_errors\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"False\",\"password\":false,\"name\":\"silent_errors\",\"display_name\":\"Silent errors\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"use_multithreading\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"True\",\"password\":false,\"name\":\"use_multithreading\",\"display_name\":\"Use multithreading\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"DirectoryLoader\"},\"description\":\"Load from a 
directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"DirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/file_directory\",\"beta\":false,\"error\":null},\"EverNoteLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".xml\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"xml\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"EverNoteLoader\"},\"description\":\"Load from `EverNote`.\",\"base_classes\":[\"Document\"],\"display_name\":\"EverNoteLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote\",\"beta\":false,\"error\":null},\"FacebookChatLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"FacebookChatLoader\"},\"description\":\"Load `Facebook Chat` messages directory 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"FacebookChatLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat\",\"beta\":false,\"error\":null},\"GitLoader\":{\"template\":{\"branch\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"branch\",\"display_name\":\"Branch\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"clone_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"clone_url\",\"display_name\":\"Clone URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"file_filter\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_filter\",\"display_name\":\"File extensions (comma-separated)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"repo_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"repo_path\",\"display_name\":\"Path to repository\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GitLoader\"},\"description\":\"Load `Git` repository 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/git\",\"beta\":false,\"error\":null},\"GitbookLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_page\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_page\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GitbookLoader\"},\"description\":\"Load `GitBook` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitbookLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gitbook\",\"beta\":false,\"error\":null},\"GutenbergLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GutenbergLoader\"},\"description\":\"Load from 
`Gutenberg.org`.\",\"base_classes\":[\"Document\"],\"display_name\":\"GutenbergLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gutenberg\",\"beta\":false,\"error\":null},\"HNLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"HNLoader\"},\"description\":\"Load `Hacker News` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"HNLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/hacker_news\",\"beta\":false,\"error\":null},\"IFixitLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"IFixitLoader\"},\"description\":\"Load `iFixit` repair guides, device wikis and 
answers.\",\"base_classes\":[\"Document\"],\"display_name\":\"IFixitLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit\",\"beta\":false,\"error\":null},\"IMSDbLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"IMSDbLoader\"},\"description\":\"Load `IMSDb` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"IMSDbLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/imsdb\",\"beta\":false,\"error\":null},\"NotionDirectoryLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"NotionDirectoryLoader\"},\"description\":\"Load `Notion directory` 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"NotionDirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/notion\",\"beta\":false,\"error\":null},\"PyPDFLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".pdf\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"pdf\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"PyPDFLoader\"},\"description\":\"Load PDF using pypdf into list of documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"beta\":false,\"error\":null},\"PyPDFDirectoryLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PyPDFDirectoryLoader\"},\"description\":\"Load a directory with `PDF` files using `pypdf` and chunks at character 
level.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFDirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"beta\":false,\"error\":null},\"ReadTheDocsLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ReadTheDocsLoader\"},\"description\":\"Load `ReadTheDocs` documentation directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"ReadTheDocsLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/readthedocs_documentation\",\"beta\":false,\"error\":null},\"SRTLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".srt\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"srt\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"SRTLoader\"},\"description\":\"Load `.srt` (subtitle) 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"SRTLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle\",\"beta\":false,\"error\":null},\"SlackDirectoryLoader\":{\"template\":{\"zip_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".zip\"],\"password\":false,\"name\":\"zip_path\",\"display_name\":\"Path to zip file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"zip\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"workspace_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"workspace_url\",\"display_name\":\"Workspace URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SlackDirectoryLoader\"},\"description\":\"Load from a `Slack` directory 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"SlackDirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack\",\"beta\":false,\"error\":null},\"TextLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".txt\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"txt\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"TextLoader\"},\"description\":\"Load text file.\",\"base_classes\":[\"Document\"],\"display_name\":\"TextLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/\",\"beta\":false,\"error\":null},\"UnstructuredEmailLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".eml\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"eml\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"UnstructuredEmailLoader\"},\"description\":\"Load email files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredEmailLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/email\",\"beta\":false,\"error\":null},\"UnstructuredHTMLLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".html\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"html\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"UnstructuredHTMLLoader\"},\"description\":\"Load `HTML` files using `Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredHTMLLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"beta\":false,\"error\":null},\"UnstructuredMarkdownLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".md\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"md\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"UnstructuredMarkdownLoader\"},\"description\":\"Load `Markdown` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredMarkdownLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/markdown\",\"beta\":false,\"error\":null},\"UnstructuredPowerPointLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".pptx\",\".ppt\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"pptx\",\"ppt\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"UnstructuredPowerPointLoader\"},\"description\":\"Load `Microsoft PowerPoint` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredPowerPointLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_powerpoint\",\"beta\":false,\"error\":null},\"UnstructuredWordDocumentLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".docx\",\".doc\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"docx\",\"doc\"],\"file_path\":null},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"_type\":\"UnstructuredWordDocumentLoader\"},\"description\":\"Load `Microsoft Word` file using `Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredWordDocumentLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word\",\"beta\":false,\"error\":null},\"WebBaseLoader\":{\"template\":{\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{},\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"WebBaseLoader\"},\"description\":\"Load HTML pages using `urllib` and parse them with 
`BeautifulSoup'.\",\"base_classes\":[\"Document\"],\"display_name\":\"WebBaseLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base\",\"beta\":false,\"error\":null}},\"textsplitters\":{\"CharacterTextSplitter\":{\"template\":{\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"chunk_overlap\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":200,\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"separator\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\\\\n\",\"password\":false,\"name\":\"separator\",\"display_name\":\"Separator\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CharacterTextSplitter\"},\"description\":\"Splitting text that looks at characters.\",\"base_classes\":[\"Document\"],\"display_name\":\"CharacterTextSplitter\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter\",\"beta\":false,\"error\":null},\"LanguageRecursiveTextSplitter\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom 
langchain.text_splitter import Language\\nfrom langchain.schema import Document\\n\\n\\nclass LanguageRecursiveTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Language Recursive Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length based on language.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter\\\"\\n\\n def build_config(self):\\n options = [x.value for x in Language]\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separator_type\\\": {\\n \\\"display_name\\\": \\\"Separator Type\\\",\\n \\\"info\\\": \\\"The type of separator to use.\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"options\\\": options,\\n \\\"value\\\": \\\"Python\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": \\\"The characters to split on.\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n \\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n separator_type: Optional[str] = \\\"Python\\\",\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the 
length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n\\n splitter = RecursiveCharacterTextSplitter.from_language(\\n language=Language(separator_type),\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n return docs\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"chunk_overlap\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":200,\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between chunks.\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\",\"type\":\"int\",\"list\":false},\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\",\"type\":\"Document\",\"list\":true},\"separator_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"Python\",\"password\":false,\"options\":[\"cpp\",\"go\",\"java\",\"kotlin\",\"js\",\"ts\",\"php\",\"proto\",\"python\",\"rst\",\"ruby\",\"rust\",\"scala\",\"swift\",\"markdown\",\"latex\",\"html\",\"sol\",\"csharp\"],\"name\":\"separator_type\",\"display_name\":\"Separator 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"The type of separator to use.\",\"type\":\"str\",\"list\":true}},\"description\":\"Split text into chunks of a specified length based on language.\",\"base_classes\":[\"Document\"],\"display_name\":\"Language Recursive Text Splitter\",\"custom_fields\":{\"chunk_overlap\":null,\"chunk_size\":null,\"documents\":null,\"separator_type\":null},\"output_types\":[\"LanguageRecursiveTextSplitter\"],\"documentation\":\"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter\",\"beta\":true,\"error\":null},\"RecursiveCharacterTextSplitter\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.utils.util import build_loader_repr_from_documents\\n\\n\\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Recursive Character Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": 'The characters to split on.\\\\nIf left empty defaults to [\\\"\\\\\\\\n\\\\\\\\n\\\", \\\"\\\\\\\\n\\\", \\\" \\\", \\\"\\\"].',\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n \\\"info\\\": \\\"The amount of overlap between 
chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n separators: Optional[list[str]] = None,\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n if separators == \\\"\\\":\\n separators = None\\n elif separators:\\n # check if the separators list has escaped characters\\n # if there are escaped characters, unescape them\\n separators = [x.encode().decode(\\\"unicode-escape\\\") for x in separators]\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n splitter = RecursiveCharacterTextSplitter(\\n separators=separators,\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n self.repr_value = build_loader_repr_from_documents(docs)\\n return docs\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"chunk_overlap\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":200,\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between 
chunks.\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\",\"type\":\"int\",\"list\":false},\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\",\"type\":\"Document\",\"list\":true},\"separators\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"separators\",\"display_name\":\"Separators\",\"advanced\":false,\"dynamic\":false,\"info\":\"The characters to split on.\\nIf left empty defaults to [\\\"\\\\n\\\\n\\\", \\\"\\\\n\\\", \\\" \\\", \\\"\\\"].\",\"type\":\"str\",\"list\":true}},\"description\":\"Split text into chunks of a specified length.\",\"base_classes\":[\"Document\"],\"display_name\":\"Recursive Character Text 
Splitter\",\"custom_fields\":{\"chunk_overlap\":null,\"chunk_size\":null,\"documents\":null,\"separators\":null},\"output_types\":[\"RecursiveCharacterTextSplitter\"],\"documentation\":\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\",\"beta\":true,\"error\":null}},\"utilities\":{\"BingSearchAPIWrapper\":{\"template\":{\"bing_search_url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"bing_search_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"bing_subscription_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"bing_subscription_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"_type\":\"BingSearchAPIWrapper\"},\"description\":\"Wrapper for Bing Search 
API.\",\"base_classes\":[\"BingSearchAPIWrapper\"],\"display_name\":\"BingSearchAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSearchAPIWrapper\":{\"template\":{\"google_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"google_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"google_cse_id\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"google_cse_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_engine\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"search_engine\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"siterestrict\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"siterestrict\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"GoogleSearchAPIWrapper\"},\"description\":\"Wrapper for Google Search 
API.\",\"base_classes\":[\"GoogleSearchAPIWrapper\"],\"display_name\":\"GoogleSearchAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSerperAPIWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ClientSession\",\"list\":false},\"gl\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"us\",\"password\":false,\"name\":\"gl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"hl\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"en\",\"password\":false,\"name\":\"hl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"result_key_for_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{\\n \\\"news\\\": \\\"news\\\",\\n \\\"places\\\": \\\"places\\\",\\n \\\"images\\\": \\\"images\\\",\\n \\\"search\\\": 
\\\"organic\\\"\\n}\",\"password\":true,\"name\":\"result_key_for_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"serper_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"serper_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tbs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tbs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"type\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"search\",\"password\":false,\"options\":[\"news\",\"search\",\"places\",\"images\"],\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"GoogleSerperAPIWrapper\"},\"description\":\"Wrapper around the Serper.dev Google Search API.\",\"base_classes\":[\"GoogleSerperAPIWrapper\"],\"display_name\":\"GoogleSerperAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SearxSearchWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"categories\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"categories\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"engines\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"engines\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer 
\\\"}\",\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"params\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"params\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"query_suffix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"query_suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"searx_host\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"searx_host\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"unsecure\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"unsecure\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"SearxSearchWrapper\"},\"description\":\"Wrapper for Searx API.\",\"base_classes\":[\"SearxSearchWrapper\"],\"display_name\":\"SearxSearchWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SerpAPIWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ClientSession\",\"list\":false},\"params\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"{\\n \\\"engine\\\": \\\"google\\\",\\n \\\"google_domain\\\": \\\"google.com\\\",\\n \\\"gl\\\": \\\"us\\\",\\n \\\"hl\\\": 
\\\"en\\\"\\n}\",\"password\":false,\"name\":\"params\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false},\"search_engine\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"search_engine\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"serpapi_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SerpAPIWrapper\"},\"description\":\"Wrapper around SerpAPI.\",\"base_classes\":[\"SerpAPIWrapper\"],\"display_name\":\"SerpAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WikipediaAPIWrapper\":{\"template\":{\"doc_content_chars_max\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":4000,\"password\":false,\"name\":\"doc_content_chars_max\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"lang\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"en\",\"password\":false,\"name\":\"lang\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"load_all_available_meta\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"load_all_available_meta\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"top_k_results\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":3,\"password\":false,\"name\":\"top_k_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"wiki_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wiki_client\",\"advanced\":fal
se,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"_type\":\"WikipediaAPIWrapper\"},\"description\":\"Wrapper around WikipediaAPI.\",\"base_classes\":[\"WikipediaAPIWrapper\"],\"display_name\":\"WikipediaAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WolframAlphaAPIWrapper\":{\"template\":{\"wolfram_alpha_appid\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wolfram_alpha_appid\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"wolfram_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wolfram_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"_type\":\"WolframAlphaAPIWrapper\"},\"description\":\"Wrapper for Wolfram Alpha.\",\"base_classes\":[\"WolframAlphaAPIWrapper\"],\"display_name\":\"WolframAlphaAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GetRequest\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\nimport requests\\nfrom typing import Optional\\n\\n\\nclass GetRequest(CustomComponent):\\n display_name: str = \\\"GET Request\\\"\\n description: str = \\\"Make a GET request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#get-request\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\n \\\"display_name\\\": \\\"URL\\\",\\n \\\"info\\\": \\\"The URL to make the request to\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"info\\\": \\\"The 
headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"timeout\\\": {\\n \\\"display_name\\\": \\\"Timeout\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"The timeout to use for the request.\\\",\\n \\\"value\\\": 5,\\n },\\n }\\n\\n def get_document(\\n self, session: requests.Session, url: str, headers: Optional[dict], timeout: int\\n ) -> Document:\\n try:\\n response = session.get(url, headers=headers, timeout=int(timeout))\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except requests.Timeout:\\n return Document(\\n page_content=\\\"Request Timed Out\\\",\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 408},\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n url: str,\\n headers: Optional[dict] = None,\\n timeout: int = 5,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n urls = url if isinstance(url, list) else [url]\\n with requests.Session() as session:\\n documents = [self.get_document(session, u, headers, timeout) for u in urls]\\n self.repr_value = documents\\n return documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the 
request.\",\"type\":\"dict\",\"list\":false},\"timeout\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"The timeout to use for the request.\",\"type\":\"int\",\"list\":false},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to\",\"type\":\"str\",\"list\":true}},\"description\":\"Make a GET request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"GET Request\",\"custom_fields\":{\"headers\":null,\"timeout\":null,\"url\":null},\"output_types\":[\"GetRequest\"],\"documentation\":\"https://docs.langflow.org/components/utilities#get-request\",\"beta\":true,\"error\":null},\"PostRequest\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\nimport requests\\nfrom typing import Optional\\n\\n\\nclass PostRequest(CustomComponent):\\n display_name: str = \\\"POST Request\\\"\\n description: str = \\\"Make a POST request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#post-request\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def post_document(\\n self,\\n session: 
requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> Document:\\n try:\\n response = session.post(url, headers=headers, data=document.page_content)\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": 500,\\n },\\n )\\n\\n def build(\\n self,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(\\n isinstance(doc, Document) for doc in document\\n ):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [\\n self.post_document(session, doc, url, headers) for doc in documents\\n ]\\n self.repr_value = documents\\n return documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"document\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the 
request.\",\"type\":\"dict\",\"list\":false},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\",\"type\":\"str\",\"list\":false}},\"description\":\"Make a POST request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"POST Request\",\"custom_fields\":{\"document\":null,\"headers\":null,\"url\":null},\"output_types\":[\"PostRequest\"],\"documentation\":\"https://docs.langflow.org/components/utilities#post-request\",\"beta\":true,\"error\":null},\"UpdateRequest\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import List, Optional\\nimport requests\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass UpdateRequest(CustomComponent):\\n display_name: str = \\\"Update Request\\\"\\n description: str = \\\"Make a PATCH request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#update-request\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"field_type\\\": \\\"NestedDict\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n \\\"method\\\": {\\n \\\"display_name\\\": \\\"Method\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"The HTTP method to use.\\\",\\n \\\"options\\\": [\\\"PATCH\\\", \\\"PUT\\\"],\\n \\\"value\\\": \\\"PATCH\\\",\\n },\\n }\\n\\n def update_document(\\n self,\\n 
session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n method: str = \\\"PATCH\\\",\\n ) -> Document:\\n try:\\n if method == \\\"PATCH\\\":\\n response = session.patch(\\n url, headers=headers, data=document.page_content\\n )\\n elif method == \\\"PUT\\\":\\n response = session.put(url, headers=headers, data=document.page_content)\\n else:\\n raise ValueError(f\\\"Unsupported method: {method}\\\")\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n method: str,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> List[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(\\n isinstance(doc, Document) for doc in document\\n ):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [\\n self.update_document(session, doc, url, headers, method)\\n for doc in documents\\n ]\\n self.repr_value = documents\\n return 
documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"document\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the request.\",\"type\":\"NestedDict\",\"list\":false},\"method\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"PATCH\",\"password\":false,\"options\":[\"PATCH\",\"PUT\"],\"name\":\"method\",\"display_name\":\"Method\",\"advanced\":false,\"dynamic\":false,\"info\":\"The HTTP method to use.\",\"type\":\"str\",\"list\":true},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\",\"type\":\"str\",\"list\":false}},\"description\":\"Make a PATCH request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"Update Request\",\"custom_fields\":{\"document\":null,\"headers\":null,\"method\":null,\"url\":null},\"output_types\":[\"UpdateRequest\"],\"documentation\":\"https://docs.langflow.org/components/utilities#update-request\",\"beta\":true,\"error\":null},\"JSONDocumentBuilder\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"### JSON Document Builder\\n\\n# Build a Document containing a JSON object using a key and another Document page content.\\n\\n# **Params**\\n\\n# - **Key:** The key to use for the JSON object.\\n# - **Document:** The Document page to use for the JSON 
object.\\n\\n# **Output**\\n\\n# - **Document:** The Document containing the JSON object.\\n\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass JSONDocumentBuilder(CustomComponent):\\n display_name: str = \\\"JSON Document Builder\\\"\\n description: str = \\\"Build a Document containing a JSON object using a key and another Document page content.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n beta = True\\n documentation: str = (\\n \\\"https://docs.langflow.org/components/utilities#json-document-builder\\\"\\n )\\n\\n field_config = {\\n \\\"key\\\": {\\\"display_name\\\": \\\"Key\\\"},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def build(\\n self,\\n key: str,\\n document: Document,\\n ) -> Document:\\n documents = None\\n if isinstance(document, list):\\n documents = [\\n Document(\\n page_content=orjson_dumps({key: doc.page_content}, indent_2=False)\\n )\\n for doc in document\\n ]\\n elif isinstance(document, Document):\\n documents = Document(\\n page_content=orjson_dumps({key: document.page_content}, indent_2=False)\\n )\\n else:\\n raise TypeError(\\n f\\\"Expected Document or list of Documents, got {type(document)}\\\"\\n )\\n self.repr_value = documents\\n return documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"document\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"key\",\"display_name\":\"Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Build a Document containing a 
JSON object using a key and another Document page content.\",\"base_classes\":[\"Document\"],\"display_name\":\"JSON Document Builder\",\"custom_fields\":{\"document\":null,\"key\":null},\"output_types\":[\"JSONDocumentBuilder\"],\"documentation\":\"https://docs.langflow.org/components/utilities#json-document-builder\",\"beta\":true,\"error\":null}},\"output_parsers\":{\"ResponseSchema\":{\"template\":{\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"string\",\"password\":false,\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ResponseSchema\"},\"description\":\"A schema for a response from a structured output 
parser.\",\"base_classes\":[\"ResponseSchema\"],\"display_name\":\"ResponseSchema\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"beta\":false,\"error\":null},\"StructuredOutputParser\":{\"template\":{\"response_schemas\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"response_schemas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ResponseSchema\",\"list\":true},\"_type\":\"StructuredOutputParser\"},\"description\":\"\",\"base_classes\":[\"BaseLLMOutputParser\",\"BaseOutputParser\",\"StructuredOutputParser\"],\"display_name\":\"StructuredOutputParser\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"beta\":false,\"error\":null}},\"retrievers\":{\"MultiQueryRetriever\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"template\":\"You are an AI language model assistant. Your task is \\n to generate 3 different versions of the given user \\n question to retrieve relevant documents from a vector database. \\n By generating multiple perspectives on the user question, \\n your goal is to help the user overcome some of the limitations \\n of distance-based similarity search. Provide these alternative \\n questions separated by newlines. 
Original question: {question}\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"parser_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"lines\",\"password\":false,\"name\":\"parser_key\",\"display_name\":\"Parser Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"MultiQueryRetriever\"},\"description\":\"Initialize from llm using default template.\",\"base_classes\":[\"MultiQueryRetriever\",\"BaseRetriever\"],\"display_name\":\"MultiQueryRetriever\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever\",\"beta\":false,\"error\":null},\"MetalRetriever\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.retrievers import MetalRetriever\\nfrom langchain.schema import BaseRetriever\\nfrom metal_sdk.metal import Metal # type: ignore\\n\\n\\nclass MetalRetrieverComponent(CustomComponent):\\n display_name: str = \\\"Metal Retriever\\\"\\n description: str = \\\"Retriever that uses the Metal API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"password\\\": True},\\n \\\"client_id\\\": {\\\"display_name\\\": \\\"Client ID\\\", \\\"password\\\": True},\\n \\\"index_id\\\": {\\\"display_name\\\": \\\"Index ID\\\"},\\n \\\"params\\\": {\\\"display_name\\\": 
\\\"Parameters\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self, api_key: str, client_id: str, index_id: str, params: Optional[dict] = None\\n ) -> BaseRetriever:\\n try:\\n metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id)\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Metal API.\\\") from e\\n return MetalRetriever(client=metal, params=params or {})\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"client_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"client_id\",\"display_name\":\"Client ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"index_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"index_id\",\"display_name\":\"Index ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"params\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"params\",\"display_name\":\"Parameters\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"dict\",\"list\":false}},\"description\":\"Retriever that uses the Metal API.\",\"base_classes\":[\"BaseRetriever\"],\"display_name\":\"Metal Retriever\",\"custom_fields\":{\"api_key\":null,\"client_id\":null,\"index_id\":null,\"params\":null},\"output_types\":[\"MetalRetriever\"],\"documentation\":\"\",\"beta\":true,\"error\":null}},\"custom_components\":{\"CustomComponent\":{\"template\":{\"code\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow 
import CustomComponent\\n\\nfrom langflow.field_typing import (\\n Tool,\\n PromptTemplate,\\n Chain,\\n BaseChatMemory,\\n BaseLLM,\\n BaseLoader,\\n BaseMemory,\\n BaseOutputParser,\\n BaseRetriever,\\n VectorStore,\\n Embeddings,\\n TextSplitter,\\n Document,\\n AgentExecutor,\\n NestedDict,\\n Data,\\n)\\n\\n\\nclass Component(CustomComponent):\\n display_name: str = \\\"Custom Component\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n\\n def build_config(self):\\n return {\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Data) -> Data:\\n return param\\n\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\"},\"description\":null,\"base_classes\":[],\"display_name\":\"Custom Component\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":true,\"error\":null}}}" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" - }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 1.031 } + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.944 } + }, + { + "startedDateTime": "2024-02-28T14:32:30.976Z", + "time": 0.697, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/version", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": 
"access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 }, - { - "startedDateTime": "2023-08-31T14:55:36.131Z", - "time": 0.743, - "request": { - "method": "GET", - "url": "http://localhost:3000/api/v1/flows/", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Accept", "value": "application/json, text/plain, */*" }, - { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, - { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, - { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk" }, - { "name": "Connection", "value": "keep-alive" }, - { "name": "Cookie", "value": "access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk; refresh_token=auto" }, - { "name": "Host", "value": "localhost:3000" }, - { "name": "Referer", "value": "http://localhost:3000/" }, - { "name": "Sec-Fetch-Dest", "value": "empty" }, - { "name": "Sec-Fetch-Mode", "value": "cors" }, - { "name": "Sec-Fetch-Site", 
"value": "same-origin" }, - { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, - { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, - { "name": "sec-ch-ua-mobile", "value": "?0" }, - { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } - ], - "queryString": [], - "headersSize": -1, - "bodySize": -1 + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "19" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"version\":\"0.6.7\"}" }, - "response": { - "status": 200, - "statusText": "OK", - "httpVersion": "HTTP/1.1", - "cookies": [], - "headers": [ - { "name": "Access-Control-Allow-Origin", "value": "*" }, - { "name": "connection", "value": "close" }, - { "name": "content-length", "value": "2" }, - { "name": "content-type", "value": "application/json" }, - { "name": "date", "value": "Thu, 31 Aug 2023 14:55:35 GMT" }, - { "name": "server", "value": "uvicorn" } - ], - "content": { - "size": -1, - "mimeType": "application/json", - "text": "[]" - }, - "headersSize": -1, - "bodySize": -1, - "redirectURL": "" + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.697 } + }, + { + "startedDateTime": "2024-02-28T14:32:30.976Z", + "time": 2.771, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/all", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", 
"value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "633593" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": 
"{\"chains\":{\"ConversationalRetrievalChain\":{\"template\":{\"callbacks\":{\"type\":\"Callbacks\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"condense_question_llm\":{\"type\":\"BaseLanguageModel\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"condense_question_llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"condense_question_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":{\"name\":null,\"input_variables\":[\"chat_history\",\"question\"],\"input_types\":{},\"output_parser\":null,\"partial_variables\":{},\"metadata\":null,\"tags\":null,\"template\":\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.\\n\\nChat History:\\n{chat_history}\\nFollow Up Input: {question}\\nStandalone 
question:\",\"template_format\":\"f-string\",\"validate_template\":false},\"fileTypes\":[],\"password\":false,\"name\":\"condense_question_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chain_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"combine_docs_chain_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"combine_docs_chain_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"return_source_documents\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_source_documents\",\"display_name\":\"Return source 
documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"ConversationalRetrievalChain\"},\"description\":\"Convenience method to load chain from LLM and retriever.\",\"base_classes\":[\"BaseConversationalRetrievalChain\",\"Runnable\",\"Chain\",\"Generic\",\"Text\",\"RunnableSerializable\",\"Serializable\",\"object\",\"ConversationalRetrievalChain\",\"Callable\"],\"display_name\":\"ConversationalRetrievalChain\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/chat_vector_db\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false,\"output_type\":\"Chain\"},\"LLMCheckerChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Union\\n\\nfrom langchain.chains import LLMCheckerChain\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, Chain\\n\\n\\nclass LLMCheckerChainComponent(CustomComponent):\\n display_name = \\\"LLMCheckerChain\\\"\\n description = \\\"\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/chains/additional/llm_checker\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n ) -> Union[Chain, Callable]:\\n 
return LLMCheckerChain.from_llm(llm=llm)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"Callable\"],\"display_name\":\"LLMCheckerChain\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_checker\",\"custom_fields\":{\"llm\":null},\"output_types\":[\"Chain\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"LLMMathChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm_chain\":{\"type\":\"LLMChain\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm_chain\",\"display_name\":\"LLM Chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import LLMChain, LLMMathChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, BaseMemory, Chain\\n\\n\\nclass LLMMathChainComponent(CustomComponent):\\n display_name = 
\\\"LLMMathChain\\\"\\n description = \\\"Chain that interprets a prompt and executes python code to do math.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/chains/additional/llm_math\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"llm_chain\\\": {\\\"display_name\\\": \\\"LLM Chain\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"input_key\\\": {\\\"display_name\\\": \\\"Input Key\\\"},\\n \\\"output_key\\\": {\\\"display_name\\\": \\\"Output Key\\\"},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n llm_chain: LLMChain,\\n input_key: str = \\\"question\\\",\\n output_key: str = \\\"answer\\\",\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[LLMMathChain, Callable, Chain]:\\n return LLMMathChain(llm=llm, llm_chain=llm_chain, input_key=input_key, output_key=output_key, memory=memory)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"input_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"question\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"display_name\":\"Input Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"answer\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"display_name\":\"Output Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Chain that interprets a prompt and executes python code to do 
math.\",\"base_classes\":[\"LLMMathChain\",\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"Callable\"],\"display_name\":\"LLMMathChain\",\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_math\",\"custom_fields\":{\"llm\":null,\"llm_chain\":null,\"input_key\":null,\"output_key\":null,\"memory\":null},\"output_types\":[\"LLMMathChain\",\"Callable\",\"Chain\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"RetrievalQA\":{\"template\":{\"combine_documents_chain\":{\"type\":\"BaseCombineDocumentsChain\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"combine_documents_chain\",\"display_name\":\"Combine Documents Chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"retriever\",\"display_name\":\"Retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains.combine_documents.base import BaseCombineDocumentsChain\\nfrom langchain.chains.retrieval_qa.base import BaseRetrievalQA, RetrievalQA\\nfrom langchain_core.documents import Document\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseMemory, BaseRetriever, 
Text\\n\\n\\nclass RetrievalQAComponent(CustomComponent):\\n display_name = \\\"Retrieval QA\\\"\\n description = \\\"Chain for question-answering against an index.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"combine_documents_chain\\\": {\\\"display_name\\\": \\\"Combine Documents Chain\\\"},\\n \\\"retriever\\\": {\\\"display_name\\\": \\\"Retriever\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\", \\\"required\\\": False},\\n \\\"input_key\\\": {\\\"display_name\\\": \\\"Input Key\\\", \\\"advanced\\\": True},\\n \\\"output_key\\\": {\\\"display_name\\\": \\\"Output Key\\\", \\\"advanced\\\": True},\\n \\\"return_source_documents\\\": {\\\"display_name\\\": \\\"Return Source Documents\\\"},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\", \\\"input_types\\\": [\\\"Text\\\", \\\"Document\\\"]},\\n }\\n\\n def build(\\n self,\\n combine_documents_chain: BaseCombineDocumentsChain,\\n retriever: BaseRetriever,\\n inputs: str = \\\"\\\",\\n memory: Optional[BaseMemory] = None,\\n input_key: str = \\\"query\\\",\\n output_key: str = \\\"result\\\",\\n return_source_documents: bool = True,\\n ) -> Union[BaseRetrievalQA, Callable, Text]:\\n runnable = RetrievalQA(\\n combine_documents_chain=combine_documents_chain,\\n retriever=retriever,\\n memory=memory,\\n input_key=input_key,\\n output_key=output_key,\\n return_source_documents=return_source_documents,\\n )\\n if isinstance(inputs, Document):\\n inputs = inputs.page_content\\n self.status = runnable\\n result = runnable.invoke({input_key: inputs})\\n result = result.content if hasattr(result, \\\"content\\\") else result\\n # Result is a dict with keys \\\"query\\\", \\\"result\\\" and \\\"source_documents\\\"\\n # for now we just return the result\\n records = self.to_records(result.get(\\\"source_documents\\\"))\\n references_str = \\\"\\\"\\n if return_source_documents:\\n references_str = self.create_references_from_records(records)\\n result_str = result.get(\\\"result\\\")\\n 
final_result = \\\"\\\\n\\\".join([result_str, references_str])\\n self.status = final_result\\n return final_result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"input_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"query\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"display_name\":\"Input Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"input_types\":[\"Text\",\"Document\",\"Text\"],\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"output_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"result\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"display_name\":\"Output Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_source_documents\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_source_documents\",\"display_name\":\"Return Source Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Chain for question-answering against an index.\",\"base_classes\":[\"Runnable\",\"Chain\",\"BaseRetrievalQA\",\"Generic\",\"Text\",\"RunnableSerializable\",\"Serializable\",\"object\",\"Callable\"],\"display_name\":\"Retrieval 
QA\",\"documentation\":\"\",\"custom_fields\":{\"combine_documents_chain\":null,\"retriever\":null,\"inputs\":null,\"memory\":null,\"input_key\":null,\"output_key\":null,\"return_source_documents\":null},\"output_types\":[\"BaseRetrievalQA\",\"Callable\",\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"RetrievalQAWithSourcesChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"retriever\",\"display_name\":\"Retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chain_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"display_name\":\"Chain Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"The type of chain to use to combined Documents.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.chains import RetrievalQAWithSourcesChain\\nfrom langchain_core.documents import Document\\n\\nfrom 
langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, BaseMemory, BaseRetriever, Text\\n\\n\\nclass RetrievalQAWithSourcesChainComponent(CustomComponent):\\n display_name = \\\"RetrievalQAWithSourcesChain\\\"\\n description = \\\"Question-answering with sources over an index.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"chain_type\\\": {\\n \\\"display_name\\\": \\\"Chain Type\\\",\\n \\\"options\\\": [\\\"stuff\\\", \\\"map_reduce\\\", \\\"map_rerank\\\", \\\"refine\\\"],\\n \\\"info\\\": \\\"The type of chain to use to combined Documents.\\\",\\n },\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"return_source_documents\\\": {\\\"display_name\\\": \\\"Return Source Documents\\\"},\\n \\\"retriever\\\": {\\\"display_name\\\": \\\"Retriever\\\"},\\n }\\n\\n def build(\\n self,\\n inputs: str,\\n retriever: BaseRetriever,\\n llm: BaseLanguageModel,\\n chain_type: str,\\n memory: Optional[BaseMemory] = None,\\n return_source_documents: Optional[bool] = True,\\n ) -> Text:\\n runnable = RetrievalQAWithSourcesChain.from_chain_type(\\n llm=llm,\\n chain_type=chain_type,\\n memory=memory,\\n return_source_documents=return_source_documents,\\n retriever=retriever,\\n )\\n if isinstance(inputs, Document):\\n inputs = inputs.page_content\\n self.status = runnable\\n input_key = runnable.input_keys[0]\\n result = runnable.invoke({input_key: inputs})\\n result = result.content if hasattr(result, \\\"content\\\") else result\\n # Result is a dict with keys \\\"query\\\", \\\"result\\\" and \\\"source_documents\\\"\\n # for now we just return the result\\n records = self.to_records(result.get(\\\"source_documents\\\"))\\n references_str = \\\"\\\"\\n if return_source_documents:\\n references_str = self.create_references_from_records(records)\\n result_str = result.get(\\\"answer\\\")\\n final_result = \\\"\\\\n\\\".join([result_str, references_str])\\n self.status = 
final_result\\n return final_result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_source_documents\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_source_documents\",\"display_name\":\"Return Source Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Question-answering with sources over an index.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"RetrievalQAWithSourcesChain\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"retriever\":null,\"llm\":null,\"chain_type\":null,\"memory\":null,\"return_source_documents\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SQLDatabaseChain\":{\"template\":{\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advan
ced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"SQLDatabaseChain\"},\"description\":\"Create a SQLDatabaseChain from an LLM and a database connection.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"Text\",\"RunnableSerializable\",\"Serializable\",\"object\",\"SQLDatabaseChain\",\"Callable\"],\"display_name\":\"SQLDatabaseChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false,\"output_type\":\"Chain\"},\"CombineDocsChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chain_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"load_qa_chain\"},\"description\":\"Load question answering 
chain.\",\"base_classes\":[\"function\",\"BaseCombineDocumentsChain\"],\"display_name\":\"CombineDocsChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false,\"output_type\":\"Chain\"},\"SeriesCharacterChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"character\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"character\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"series\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"series\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"SeriesCharacterChain\"},\"description\":\"SeriesCharacterChain is a chain you can use to have a conversation with a character from a 
series.\",\"base_classes\":[\"Chain\",\"BaseCustomChain\",\"ConversationChain\",\"function\",\"SeriesCharacterChain\",\"LLMChain\"],\"display_name\":\"SeriesCharacterChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false,\"output_type\":\"Chain\"},\"MidJourneyPromptChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"MidJourneyPromptChain\"},\"description\":\"MidJourneyPromptChain is a chain you can use to generate new MidJourney 
prompts.\",\"base_classes\":[\"Chain\",\"BaseCustomChain\",\"MidJourneyPromptChain\",\"ConversationChain\",\"LLMChain\"],\"display_name\":\"MidJourneyPromptChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false,\"output_type\":\"Chain\"},\"TimeTravelGuideChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"TimeTravelGuideChain\"},\"description\":\"Time travel guide 
chain.\",\"base_classes\":[\"Chain\",\"BaseCustomChain\",\"ConversationChain\",\"TimeTravelGuideChain\",\"LLMChain\"],\"display_name\":\"TimeTravelGuideChain\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false,\"output_type\":\"Chain\"},\"LLMChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.chains import LLMChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n BaseMemory,\\n BasePromptTemplate,\\n Text,\\n)\\n\\n\\nclass LLMChainComponent(CustomComponent):\\n display_name = \\\"LLMChain\\\"\\n description = \\\"Chain to run queries against LLMs\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n 
}\\n\\n def build(\\n self,\\n prompt: BasePromptTemplate,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Text:\\n runnable = LLMChain(prompt=prompt, llm=llm, memory=memory)\\n result_dict = runnable.invoke({})\\n output_key = runnable.output_key\\n result = result_dict[output_key]\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Chain to run queries against LLMs\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"LLMChain\",\"documentation\":\"\",\"custom_fields\":{\"prompt\":null,\"llm\":null,\"memory\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SQLGenerator\":{\"template\":{\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"display_name\":\"Database\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"Text\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"T
he prompt must contain `{question}`.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.chains import create_sql_query_chain\\nfrom langchain_community.utilities.sql_database import SQLDatabase\\nfrom langchain_core.prompts import PromptTemplate\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, Text\\n\\n\\nclass SQLGeneratorComponent(CustomComponent):\\n display_name = \\\"Natural Language to SQL\\\"\\n description = \\\"Generate SQL from natural language.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"db\\\": {\\\"display_name\\\": \\\"Database\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt\\\",\\n \\\"info\\\": \\\"The prompt must contain `{question}`.\\\",\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"info\\\": \\\"The number of results per select statement to return. 
If 0, no limit.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n inputs: Text,\\n db: SQLDatabase,\\n llm: BaseLanguageModel,\\n top_k: int = 5,\\n prompt: Optional[PromptTemplate] = None,\\n ) -> Text:\\n if top_k > 0:\\n kwargs = {\\n \\\"k\\\": top_k,\\n }\\n if not prompt:\\n sql_query_chain = create_sql_query_chain(llm=llm, db=db, **kwargs)\\n else:\\n template = prompt.template if hasattr(prompt, \\\"template\\\") else prompt\\n # Check if {question} is in the prompt\\n if \\\"{question}\\\" not in template or \\\"question\\\" not in template.input_variables:\\n raise ValueError(\\\"Prompt must contain `{question}` to be used with Natural Language to SQL.\\\")\\n sql_query_chain = create_sql_query_chain(llm=llm, db=db, prompt=prompt, **kwargs)\\n query_writer = sql_query_chain | {\\\"query\\\": lambda x: x.replace(\\\"SQLQuery:\\\", \\\"\\\").strip()}\\n response = query_writer.invoke({\\\"question\\\": inputs})\\n query = response.get(\\\"query\\\")\\n self.status = query\\n return query\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":false,\"dynamic\":false,\"info\":\"The number of results per select statement to return. 
If 0, no limit.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate SQL from natural language.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Natural Language to SQL\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"db\":null,\"llm\":null,\"top_k\":null,\"prompt\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ConversationChain\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"Memory to load context from. If none is provided, a ConversationBufferMemory will be used.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.chains import ConversationChain\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, BaseMemory, Chain, Text\\n\\n\\nclass ConversationChainComponent(CustomComponent):\\n display_name = \\\"ConversationChain\\\"\\n description = \\\"Chain to have a conversation and load context from memory.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\\"display_name\\\": \\\"Prompt\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"memory\\\": {\\n \\\"display_name\\\": \\\"Memory\\\",\\n \\\"info\\\": \\\"Memory to load context from. 
If none is provided, a ConversationBufferMemory will be used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n inputs: str,\\n llm: BaseLanguageModel,\\n memory: Optional[BaseMemory] = None,\\n ) -> Union[Chain, Callable, Text]:\\n if memory is None:\\n chain = ConversationChain(llm=llm)\\n else:\\n chain = ConversationChain(llm=llm, memory=memory)\\n result = chain.invoke(inputs)\\n # result is an AIMessage which is a subclass of BaseMessage\\n # We need to check if it is a string or a BaseMessage\\n if hasattr(result, \\\"content\\\") and isinstance(result.content, str):\\n self.status = \\\"is message\\\"\\n result = result.content\\n elif isinstance(result, str):\\n self.status = \\\"is_string\\\"\\n result = result\\n else:\\n # is dict\\n result = result.get(\\\"response\\\")\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Chain to have a conversation and load context from 
memory.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"Text\",\"RunnableSerializable\",\"Serializable\",\"object\",\"Callable\"],\"display_name\":\"ConversationChain\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"llm\":null,\"memory\":null},\"output_types\":[\"Chain\",\"Callable\",\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"agents\":{\"ZeroShotAgent\":{\"template\":{\"callback_manager\":{\"type\":\"BaseCallbackManager\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"callback_manager\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"output_parser\":{\"type\":\"AgentOutputParser\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tools\":{\"type\":\"BaseTool\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"format_instructions\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":true,\"value\":\"Use the following format:\\n\\nQuestion: the input question you must answer\\nThought: you should always think about what to do\\nAction: the action to take, should be one of [{tool_names}]\\nAction Input: the input to the action\\nObservation: the result of the action\\n... 
(this Thought/Action/Action Input/Observation can repeat N times)\\nThought: I now know the final answer\\nFinal Answer: the final answer to the original input question\",\"fileTypes\":[],\"password\":false,\"name\":\"format_instructions\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_variables\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"Answer the following questions as best you can. You have access to the following tools:\",\"fileTypes\":[],\"password\":false,\"name\":\"prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"suffix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"Begin!\\n\\nQuestion: {input}\\nThought:{agent_scratchpad}\",\"fileTypes\":[],\"password\":false,\"name\":\"suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"ZeroShotAgent\"},\"description\":\"Construct an agent from an LLM and 
tools.\",\"base_classes\":[\"ZeroShotAgent\",\"Callable\",\"BaseSingleActionAgent\",\"Agent\"],\"display_name\":\"ZeroShotAgent\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"JsonAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"toolkit\":{\"type\":\"JsonToolkit\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"toolkit\",\"display_name\":\"Toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.agents import AgentExecutor, create_json_agent\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n)\\nfrom langchain_community.agent_toolkits.json.toolkit import JsonToolkit\\n\\n\\nclass JsonAgentComponent(CustomComponent):\\n display_name = \\\"JsonAgent\\\"\\n description = \\\"Construct a json agent from an LLM and tools.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"toolkit\\\": {\\\"display_name\\\": \\\"Toolkit\\\"},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n toolkit: JsonToolkit,\\n ) -> AgentExecutor:\\n return create_json_agent(llm=llm, 
toolkit=toolkit)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct a json agent from an LLM and tools.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"AgentExecutor\",\"object\"],\"display_name\":\"JsonAgent\",\"documentation\":\"\",\"custom_fields\":{\"llm\":null,\"toolkit\":null},\"output_types\":[\"AgentExecutor\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"CSVAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".csv\"],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, AgentExecutor\\nfrom langchain_experimental.agents.agent_toolkits.csv.base import create_csv_agent\\n\\n\\nclass CSVAgentComponent(CustomComponent):\\n display_name = \\\"CSVAgent\\\"\\n description = \\\"Construct a CSV agent from a CSV and tools.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/agents/toolkits/csv\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\", \\\"type\\\": BaseLanguageModel},\\n \\\"path\\\": {\\\"display_name\\\": \\\"Path\\\", \\\"field_type\\\": \\\"file\\\", 
\\\"suffixes\\\": [\\\".csv\\\"], \\\"file_types\\\": [\\\".csv\\\"]},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n path: str,\\n ) -> AgentExecutor:\\n # Instantiate and return the CSV agent class with the provided llm and path\\n return create_csv_agent(llm=llm, path=path)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct a CSV agent from a CSV and tools.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"AgentExecutor\",\"object\"],\"display_name\":\"CSVAgent\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"custom_fields\":{\"llm\":null,\"path\":null},\"output_types\":[\"AgentExecutor\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VectorStoreAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"vector_store_toolkit\":{\"type\":\"VectorStoreToolkit\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vector_store_toolkit\",\"display_name\":\"Vector Store Info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.agents import AgentExecutor, create_vectorstore_agent\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit\\nfrom typing import Union, Callable\\nfrom langflow.field_typing import 
BaseLanguageModel\\n\\n\\nclass VectorStoreAgentComponent(CustomComponent):\\n display_name = \\\"VectorStoreAgent\\\"\\n description = \\\"Construct an agent from a Vector Store.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"vector_store_toolkit\\\": {\\\"display_name\\\": \\\"Vector Store Info\\\"},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n vector_store_toolkit: VectorStoreToolkit,\\n ) -> Union[AgentExecutor, Callable]:\\n return create_vectorstore_agent(llm=llm, toolkit=vector_store_toolkit)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct an agent from a Vector Store.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"AgentExecutor\",\"object\",\"Callable\"],\"display_name\":\"VectorStoreAgent\",\"documentation\":\"\",\"custom_fields\":{\"llm\":null,\"vector_store_toolkit\":null},\"output_types\":[\"AgentExecutor\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VectorStoreRouterAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"vectorstoreroutertoolkit\":{\"type\":\"VectorStoreRouterToolkit\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstoreroutertoolkit\",\"display_name\":\"Vector Store Router 
Toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain_core.language_models.base import BaseLanguageModel\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit\\nfrom langchain.agents import create_vectorstore_router_agent\\nfrom typing import Callable\\n\\n\\nclass VectorStoreRouterAgentComponent(CustomComponent):\\n display_name = \\\"VectorStoreRouterAgent\\\"\\n description = \\\"Construct an agent from a Vector Store Router.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"vectorstoreroutertoolkit\\\": {\\\"display_name\\\": \\\"Vector Store Router Toolkit\\\"},\\n }\\n\\n def build(self, llm: BaseLanguageModel, vectorstoreroutertoolkit: VectorStoreRouterToolkit) -> Callable:\\n return create_vectorstore_router_agent(llm=llm, toolkit=vectorstoreroutertoolkit)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct an agent from a Vector Store 
Router.\",\"base_classes\":[\"Callable\"],\"display_name\":\"VectorStoreRouterAgent\",\"documentation\":\"\",\"custom_fields\":{\"llm\":null,\"vectorstoreroutertoolkit\":null},\"output_types\":[\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SQLAgent\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Union, Callable\\nfrom langchain.agents import AgentExecutor\\nfrom langflow.field_typing import BaseLanguageModel\\nfrom langchain_community.agent_toolkits.sql.base import create_sql_agent\\nfrom langchain.sql_database import SQLDatabase\\nfrom langchain_community.agent_toolkits import SQLDatabaseToolkit\\n\\n\\nclass SQLAgentComponent(CustomComponent):\\n display_name = \\\"SQLAgent\\\"\\n description = \\\"Construct an SQL agent from an LLM and tools.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"database_uri\\\": {\\\"display_name\\\": \\\"Database URI\\\"},\\n \\\"verbose\\\": {\\\"display_name\\\": \\\"Verbose\\\", \\\"value\\\": False, \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n llm: BaseLanguageModel,\\n database_uri: str,\\n verbose: bool = False,\\n ) -> Union[AgentExecutor, Callable]:\\n db = SQLDatabase.from_uri(database_uri)\\n toolkit = SQLDatabaseToolkit(db=db, llm=llm)\\n return create_sql_agent(llm=llm, 
toolkit=toolkit)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"database_uri\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"database_uri\",\"display_name\":\"Database URI\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"verbose\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct an SQL agent from an LLM and tools.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"AgentExecutor\",\"object\",\"Callable\"],\"display_name\":\"SQLAgent\",\"documentation\":\"\",\"custom_fields\":{\"llm\":null,\"database_uri\":null,\"verbose\":null},\"output_types\":[\"AgentExecutor\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"OpenAIConversationalAgent\":{\"template\":{\"memory\":{\"type\":\"BaseMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"system_message\":{\"type\":\"SystemMessagePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"system_message\",\"display_name\":\"System 
Message\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tools\":{\"type\":\"Tool\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langchain.agents.agent import AgentExecutor\\nfrom langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import _get_default_system_message\\nfrom langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent\\nfrom langchain.memory.token_buffer import ConversationTokenBufferMemory\\nfrom langchain.prompts import SystemMessagePromptTemplate\\nfrom langchain.prompts.chat import MessagesPlaceholder\\nfrom langchain.schema.memory import BaseMemory\\nfrom langchain.tools import Tool\\nfrom langchain_community.chat_models import ChatOpenAI\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing.range_spec import RangeSpec\\n\\n\\nclass ConversationalAgent(CustomComponent):\\n display_name: str = \\\"OpenAI Conversational Agent\\\"\\n description: str = \\\"Conversational Agent that can use OpenAI's function calling API\\\"\\n\\n def build_config(self):\\n openai_function_models = [\\n \\\"gpt-4-turbo-preview\\\",\\n \\\"gpt-4-0125-preview\\\",\\n \\\"gpt-4-1106-preview\\\",\\n \\\"gpt-4-vision-preview\\\",\\n \\\"gpt-3.5-turbo-0125\\\",\\n \\\"gpt-3.5-turbo-1106\\\",\\n ]\\n return {\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"system_message\\\": {\\\"display_name\\\": \\\"System Message\\\"},\\n \\\"max_token_limit\\\": {\\\"display_name\\\": \\\"Max Token Limit\\\"},\\n \\\"model_name\\\": {\\n 
\\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": openai_function_models,\\n \\\"value\\\": openai_function_models[0],\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"value\\\": 0.2,\\n \\\"range_spec\\\": RangeSpec(min=0, max=2, step=0.1),\\n },\\n }\\n\\n def build(\\n self,\\n model_name: str,\\n openai_api_key: str,\\n tools: List[Tool],\\n openai_api_base: Optional[str] = None,\\n memory: Optional[BaseMemory] = None,\\n system_message: Optional[SystemMessagePromptTemplate] = None,\\n max_token_limit: int = 2000,\\n temperature: float = 0.9,\\n ) -> AgentExecutor:\\n llm = ChatOpenAI(\\n model=model_name,\\n api_key=openai_api_key,\\n base_url=openai_api_base,\\n max_tokens=max_token_limit,\\n temperature=temperature,\\n )\\n if not memory:\\n memory_key = \\\"chat_history\\\"\\n memory = ConversationTokenBufferMemory(\\n memory_key=memory_key,\\n return_messages=True,\\n output_key=\\\"output\\\",\\n llm=llm,\\n max_token_limit=max_token_limit,\\n )\\n else:\\n memory_key = memory.memory_key # type: ignore\\n\\n _system_message = system_message or _get_default_system_message()\\n prompt = OpenAIFunctionsAgent.create_prompt(\\n system_message=_system_message, # type: ignore\\n extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],\\n )\\n agent = OpenAIFunctionsAgent(\\n llm=llm,\\n tools=tools,\\n prompt=prompt, # type: ignore\\n )\\n return AgentExecutor(\\n agent=agent,\\n tools=tools, # type: ignore\\n memory=memory,\\n verbose=True,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n 
)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_token_limit\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":2000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_token_limit\",\"display_name\":\"Max Token Limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-4-turbo-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gpt-4-turbo-preview\",\"gpt-4-0125-preview\",\"gpt-4-1106-preview\",\"gpt-4-vision-preview\",\"gpt-3.5-turbo-0125\",\"gpt-3.5-turbo-1106\"],\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_api_base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.2,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":0.0,\"max\":2.0,\"step\":0.1},\"title_case\":fals
e},\"_type\":\"CustomComponent\"},\"description\":\"Conversational Agent that can use OpenAI's function calling API\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"AgentExecutor\",\"object\"],\"display_name\":\"OpenAI Conversational Agent\",\"documentation\":\"\",\"custom_fields\":{\"model_name\":null,\"openai_api_key\":null,\"tools\":null,\"openai_api_base\":null,\"memory\":null,\"system_message\":null,\"max_token_limit\":null,\"temperature\":null},\"output_types\":[\"AgentExecutor\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AgentInitializer\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"Language Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory\":{\"type\":\"BaseChatMemory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tools\":{\"type\":\"Tool\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"agent\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"chat-zero-shot-react-description\",\"chat-conversational-react-description\",\"structured-chat-zero-shot-react-description
\",\"openai-functions\",\"openai-multi-functions\",\"JsonAgent\",\"CSVAgent\",\"VectorStoreAgent\",\"VectorStoreRouterAgent\",\"SQLAgent\"],\"name\":\"agent\",\"display_name\":\"Agent Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, List, Optional, Union\\n\\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\\n\\n\\nclass AgentInitializerComponent(CustomComponent):\\n display_name: str = \\\"Agent Initializer\\\"\\n description: str = \\\"Initialize a Langchain Agent.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/modules/agents/agent_types/\\\"\\n\\n def build_config(self):\\n agents = list(types.AGENT_TO_CLASS.keys())\\n # field_type and required are optional\\n return {\\n \\\"agent\\\": {\\\"options\\\": agents, \\\"value\\\": agents[0], \\\"display_name\\\": \\\"Agent Type\\\"},\\n \\\"max_iterations\\\": {\\\"display_name\\\": \\\"Max Iterations\\\", \\\"value\\\": 10},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"tools\\\": {\\\"display_name\\\": \\\"Tools\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"Language Model\\\"},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n agent: str,\\n llm: BaseLanguageModel,\\n tools: List[Tool],\\n max_iterations: int,\\n memory: Optional[BaseChatMemory] = None,\\n ) -> Union[AgentExecutor, Callable]:\\n agent = AgentType(agent)\\n if memory:\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n agent=agent,\\n memory=memory,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n return initialize_agent(\\n tools=tools,\\n llm=llm,\\n 
agent=agent,\\n return_intermediate_steps=True,\\n handle_parsing_errors=True,\\n max_iterations=max_iterations,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_iterations\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_iterations\",\"display_name\":\"Max Iterations\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Initialize a Langchain Agent.\",\"base_classes\":[\"Runnable\",\"Chain\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"AgentExecutor\",\"object\",\"Callable\"],\"display_name\":\"Agent Initializer\",\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"custom_fields\":{\"agent\":null,\"llm\":null,\"tools\":null,\"max_iterations\":null,\"memory\":null},\"output_types\":[\"AgentExecutor\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"memories\":{\"ConversationBufferMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\"
:false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"ConversationBufferMemory\"},\"description\":\"Buffer for storing conversation memory.\",\"base_classes\":[\"BaseMemory\",\"BaseChatMemory\",\"ConversationBufferMemory\",\"Serializable\"],\"display_name\":\"ConversationBufferMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"ConversationBufferWindowMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"Th
e variable to be used as Chat Input when more than one variable is available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"ConversationBufferWindowMemory\"},\"description\":\"Buffer for storing conversation memory inside a limited size window.\",\"base_classes\":[\"ConversationBufferWindowMemory\",\"BaseMemory\",\"BaseChatMemory\",\"Serializable\"],\"display_name\":\"ConversationBufferWindowMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer_window\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"ConversationEntityMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"entity_extraction_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"entity_store\":{\"type\":\"BaseEntityStore\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_store\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"entity_summarization_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_summarization_prompt\",\"advanced\":false
,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"chat_history_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"history\",\"fileTypes\":[],\"password\":false,\"name\":\"chat_history_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"entity_cache\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"ConversationEntityMemory\"},\"description\":\"Entity extractor & summarizer 
memory.\",\"base_classes\":[\"ConversationEntityMemory\",\"BaseMemory\",\"BaseChatMemory\",\"Serializable\"],\"display_name\":\"ConversationEntityMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/entity_memory_with_sqlite\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"ConversationKGMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"entity_extraction_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"kg\":{\"type\":\"NetworkxEntityGraph\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"kg\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"knowledge_extraction_prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"knowledge_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"summary_message_cls\":{\"type\":\"Type[langchain_core.messages.base.BaseMessage]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":fal
se,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"ai_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat 
Output (e.g. answer in a ConversationalRetrievalChain)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"ConversationKGMemory\"},\"description\":\"Knowledge graph conversation memory.\",\"base_classes\":[\"BaseChatMemory\",\"BaseMemory\",\"ConversationKGMemory\",\"Serializable\"],\"display_name\":\"ConversationKGMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/kg\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"ConversationSummaryMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prompt\":{\"type\":\"BasePromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"summary_message_cls\":{\"type\":\"Type[langchain_core.messages.base.BaseMessage]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"ai_prefix\":{\"type\":\"str\",\"required\":f
alse,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"buffer\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"password\":false,\"name\":\"buffer\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"human_prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"Human\",\"fileTypes\":[],\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"ConversationSummaryMemory\"},\"description\":\"Conversation summarizer to chat memory.\",\"base_classes\":[\"BaseChatMemory\",\"Serializable\",\"ConversationSummaryMemory\",\"BaseMemory\",\"SummarizerMixin\"],\"display_name\":\"ConversationSummaryMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/summary\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"MongoDBChatMessageHistory\":{\"template\":{\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"connection_string\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"MongoDB connection string (e.g 
mongodb://mongo_user:password123@mongo:27017)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"database_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"database_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"session_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"MongoDBChatMessageHistory\"},\"description\":\"Memory store with MongoDB\",\"base_classes\":[\"BaseChatMessageHistory\",\"MongoDBChatMessageHistory\"],\"display_name\":\"MongoDBChatMessageHistory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/mongodb_chat_message_history\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"MotorheadMemory\":{\"template\":{\"chat_memory\":{\"type\":\"BaseChatMessageHistory\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"client_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"client_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":f
alse,\"input_types\":[\"Text\"]},\"context\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"context\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"session_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"timeout\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"https://api.getmetal.io/v1/motorhead\",\"fileTypes\":[],\"password\":false,\"name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"MotorheadMemory\"},\"description\":\"Chat message memory backed by Motorhead 
service.\",\"base_classes\":[\"MotorheadMemory\",\"BaseChatMemory\",\"BaseMemory\",\"Serializable\"],\"display_name\":\"MotorheadMemory\",\"documentation\":\"https://python.langchain.com/docs/integrations/memory/motorhead_memory\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"PostgresChatMessageHistory\":{\"template\":{\"connection_string\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"postgresql://postgres:mypassword@localhost/chat_history\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"session_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"table_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"PostgresChatMessageHistory\"},\"description\":\"Memory store with 
Postgres\",\"base_classes\":[\"BaseChatMessageHistory\",\"PostgresChatMessageHistory\"],\"display_name\":\"PostgresChatMessageHistory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/postgres_chat_message_history\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false},\"VectorStoreRetrieverMemory\":{\"template\":{\"retriever\":{\"type\":\"VectorStoreRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"exclude_input_keys\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"exclude_input_keys\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"input_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"memory_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"fileTypes\":[],\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_docs\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"return_docs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"return_messages\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"VectorStoreRetrieverMemory\"},\"description\":\"VectorStoreRetriever-backed memory.\",\"base_classes\":[\"BaseMemory\",\"VectorStoreRetrieverMemory\",\"Serializable\"],\"display_name\":\"VectorStoreRetrieverMemory\",\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/vectorstore_retriever_memory\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":true,\"beta\":false}},\"tools\":{\"Calculator\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"Calculator\"},\"description\":\"Useful for when you need to answer questions about 
math.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"Calculator\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"Search\":{\"template\":{\"aiosession\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"serpapi_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"Search\"},\"description\":\"A search engine. Useful for when you need to answer questions about current events. Input should be a search 
query.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"Search\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"Tool\":{\"template\":{\"func\":{\"type\":\"Callable\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"func\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_direct\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"Tool\"},\"description\":\"Converts a chain, agent or function into a tool.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"Tool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"PythonFunctionTool\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return 
text\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"return_direct\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"PythonFunctionTool\"},\"description\":\"Python function to be executed.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"PythonFunctionTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"PythonFunction\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return text\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"PythonFunction\"},\"description\":\"Python function to be 
executed.\",\"base_classes\":[\"Callable\"],\"display_name\":\"PythonFunction\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"JsonSpec\":{\"template\":{\"path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\",\".yaml\",\".yml\"],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_value_length\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_value_length\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"JsonSpec\"},\"description\":\"\",\"base_classes\":[\"JsonSpec\",\"BaseTool\",\"Tool\"],\"display_name\":\"JsonSpec\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"BingSearchRun\":{\"template\":{\"api_wrapper\":{\"type\":\"BingSearchAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced
\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"BingSearchRun\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"BingSearchRun\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\"],\"display_name\":\"BingSearchRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"GoogleSearchResults\":{\"template\":{\"api_wrapper\":{\"type\":\"GoogleSearchAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\
",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"num_results\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":4,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":
\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"GoogleSearchResults\"},\"description\":\"\",\"base_classes\":[\"GoogleSearchResults\",\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\"],\"display_name\":\"GoogleSearchResults\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"GoogleSearchRun\":{\"template\":{\"api_wrapper\":{\"type\":\"GoogleSearchAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",
\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"GoogleSearchRun\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"GoogleSearchRun\",\"Serializable\",\"object\"],\"display_name\":\"GoogleSearchRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"GoogleSerperRun\":{\"template\":{\"api_wrapper\":{\"type\":\"GoogleSerperAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"l
ist\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"GoogleSerperRun\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"GoogleSerperRun\",\"object\"],\"display_name\":\"GoogleSerperRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"InfoSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"fi
le_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"InfoSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"InfoSQLDatabaseTool\",\"Runnable\",\"BaseSQLDatabaseTool\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\"],\"display_name\":\"InfoSQLDatabaseTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"JsonGetValueTool\
":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"spec\":{\"type\":\"JsonSpec\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":
false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"JsonGetValueTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"JsonGetValueTool\"],\"display_name\":\"JsonGetValueTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"JsonListKeysTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"spec\":{\"type\":\"JsonSpec\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":fal
se,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"JsonListKeysTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"JsonListKeysTool\"],\"display_name\":\"JsonListKeysTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"ListSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placehol
der\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"ListSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"BaseSQLDatabaseTool\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"ListSQLDatabaseTool\"],\"display_name\":\"ListSQLDatabaseTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"QuerySQLDataBaseTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"sho
w\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"db\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"QuerySQLDataBaseTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"BaseSQLDatabaseTool\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"QuerySQLDataBaseTool\",\"Serializable\",\"object\"],\"display_name\":\"QuerySQLDataBaseTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{
},\"pinned\":false,\"beta\":false},\"RequestsDeleteTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"requests_wrapper\":{\"type\":\"GenericRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"lis
t\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"RequestsDeleteTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"RequestsDeleteTool\",\"BaseRequestsTool\"],\"display_name\":\"RequestsDeleteTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"RequestsGetTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"requests_wrapper\":{\"type\":\"GenericRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\
"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"RequestsGetTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"RequestsGetTool\",\"BaseRequestsTool\"],\"display_name\":\"RequestsGetTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"RequestsPatchTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"requests_wrapper\":{\"type\":\"GenericRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[
],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"RequestsPatchTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"RequestsPatchTool\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"BaseRequestsTool\"],\"display_name\":\"RequestsPatchTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"RequestsPostTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\
":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"requests_wrapper\":{\"type\":\"GenericRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"RequestsPostTool\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"Run
nableSerializable\",\"Tool\",\"Serializable\",\"RequestsPostTool\",\"object\",\"BaseRequestsTool\"],\"display_name\":\"RequestsPostTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"RequestsPutTool\":{\"template\":{\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"requests_wrapper\":{\"type\":\"GenericRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\
"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"RequestsPutTool\"},\"description\":\"\",\"base_classes\":[\"RequestsPutTool\",\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"BaseRequestsTool\"],\"display_name\":\"RequestsPutTool\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"WikipediaQueryRun\":{\"template\":{\"api_wrapper\":{\"type\":\"WikipediaAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\
":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"WikipediaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"WikipediaQueryRun\",\"Serializable\",\"object\"],\"display_name\":\"WikipediaQueryRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"WolframAlphaQueryRun\":{\"template\":{\"api_wrapper\":{\"type\":\"WolframAlphaAPIWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"args_schema\":{\"type\":\"Type[pydantic.v1.main.BaseModel]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false}
,\"callbacks\":{\"type\":\"langchain_core.callbacks.base.BaseCallbackHandler\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_tool_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"handle_validation_error\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"handle_validation_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"WolframAlphaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Runnable\",\"WolframAlphaQueryRun\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\"],\"display_name\":\"WolframAlphaQueryRun\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false}},\"toolkits\":{\"JsonToolkit\":{\"template\":{\"spec\":{\"type\":\"JsonSpec\",\"required\":true,\"placeholder\":\"\",\"list\":fals
e,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"spec\",\"display_name\":\"Spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain_community.tools.json.tool import JsonSpec\\nfrom langchain_community.agent_toolkits.json.toolkit import JsonToolkit\\n\\n\\nclass JsonToolkitComponent(CustomComponent):\\n display_name = \\\"JsonToolkit\\\"\\n description = \\\"Toolkit for interacting with a JSON spec.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"spec\\\": {\\\"display_name\\\": \\\"Spec\\\", \\\"type\\\": JsonSpec},\\n }\\n\\n def build(self, spec: JsonSpec) -> JsonToolkit:\\n return JsonToolkit(spec=spec)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Toolkit for interacting with a JSON spec.\",\"base_classes\":[\"BaseToolkit\",\"JsonToolkit\"],\"display_name\":\"JsonToolkit\",\"documentation\":\"\",\"custom_fields\":{\"spec\":null},\"output_types\":[\"JsonToolkit\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"OpenAPIToolkit\":{\"template\":{\"json_agent\":{\"type\":\"AgentExecutor\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"json_agent\",\"display_name\":\"JSON Agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"requests_wrapper\":{\"type\":\"TextRequestsWrapper\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"requests_wrapper\",\"display_name\":\"Text Requests 
Wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_community.agent_toolkits.openapi.toolkit import BaseToolkit, OpenAPIToolkit\\nfrom langchain_community.utilities.requests import TextRequestsWrapper\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import AgentExecutor\\n\\n\\nclass OpenAPIToolkitComponent(CustomComponent):\\n display_name = \\\"OpenAPIToolkit\\\"\\n description = \\\"Toolkit for interacting with an OpenAPI API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"json_agent\\\": {\\\"display_name\\\": \\\"JSON Agent\\\"},\\n \\\"requests_wrapper\\\": {\\\"display_name\\\": \\\"Text Requests Wrapper\\\"},\\n }\\n\\n def build(\\n self,\\n json_agent: AgentExecutor,\\n requests_wrapper: TextRequestsWrapper,\\n ) -> BaseToolkit:\\n return OpenAPIToolkit(json_agent=json_agent, requests_wrapper=requests_wrapper)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Toolkit for interacting with an OpenAPI 
API.\",\"base_classes\":[\"BaseToolkit\"],\"display_name\":\"OpenAPIToolkit\",\"documentation\":\"\",\"custom_fields\":{\"json_agent\":null,\"requests_wrapper\":null},\"output_types\":[\"BaseToolkit\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VectorStoreInfo\":{\"template\":{\"vectorstore\":{\"type\":\"VectorStore\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstore\",\"display_name\":\"VectorStore\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Union\\n\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo\\nfrom langchain_community.vectorstores import VectorStore\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass VectorStoreInfoComponent(CustomComponent):\\n display_name = \\\"VectorStoreInfo\\\"\\n description = \\\"Information about a VectorStore\\\"\\n\\n def build_config(self):\\n return {\\n \\\"vectorstore\\\": {\\\"display_name\\\": \\\"VectorStore\\\"},\\n \\\"description\\\": {\\\"display_name\\\": \\\"Description\\\", \\\"multiline\\\": True},\\n \\\"name\\\": {\\\"display_name\\\": \\\"Name\\\"},\\n }\\n\\n def build(\\n self,\\n vectorstore: VectorStore,\\n description: str,\\n name: str,\\n ) -> Union[VectorStoreInfo, Callable]:\\n return VectorStoreInfo(vectorstore=vectorstore, description=description, 
name=name)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"description\",\"display_name\":\"Description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"name\",\"display_name\":\"Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Information about a VectorStore\",\"base_classes\":[\"Callable\",\"VectorStoreInfo\"],\"display_name\":\"VectorStoreInfo\",\"documentation\":\"\",\"custom_fields\":{\"vectorstore\":null,\"description\":null,\"name\":null},\"output_types\":[\"VectorStoreInfo\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VectorStoreRouterToolkit\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"vectorstores\":{\"type\":\"VectorStoreInfo\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstores\",\"display_name\":\"Vector Stores\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow 
import CustomComponent\\nfrom typing import List, Union\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreRouterToolkit\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo\\nfrom langflow.field_typing import BaseLanguageModel, Tool\\n\\n\\nclass VectorStoreRouterToolkitComponent(CustomComponent):\\n display_name = \\\"VectorStoreRouterToolkit\\\"\\n description = \\\"Toolkit for routing between Vector Stores.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"vectorstores\\\": {\\\"display_name\\\": \\\"Vector Stores\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n }\\n\\n def build(\\n self, vectorstores: List[VectorStoreInfo], llm: BaseLanguageModel\\n ) -> Union[Tool, VectorStoreRouterToolkit]:\\n print(\\\"vectorstores\\\", vectorstores)\\n print(\\\"llm\\\", llm)\\n return VectorStoreRouterToolkit(vectorstores=vectorstores, llm=llm)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Toolkit for routing between Vector 
Stores.\",\"base_classes\":[\"Runnable\",\"BaseToolkit\",\"Generic\",\"VectorStoreRouterToolkit\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\"],\"display_name\":\"VectorStoreRouterToolkit\",\"documentation\":\"\",\"custom_fields\":{\"vectorstores\":null,\"llm\":null},\"output_types\":[\"Tool\",\"VectorStoreRouterToolkit\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VectorStoreToolkit\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"vectorstore_info\":{\"type\":\"VectorStoreInfo\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstore_info\",\"display_name\":\"Vector Store Info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreToolkit\\nfrom langchain.agents.agent_toolkits.vectorstore.toolkit import VectorStoreInfo\\nfrom langflow.field_typing import (\\n BaseLanguageModel,\\n)\\nfrom langflow.field_typing import (\\n Tool,\\n)\\nfrom typing import Union\\n\\n\\nclass VectorStoreToolkitComponent(CustomComponent):\\n display_name = \\\"VectorStoreToolkit\\\"\\n description = \\\"Toolkit for interacting with a Vector Store.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"vectorstore_info\\\": {\\\"display_name\\\": \\\"Vector Store Info\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n }\\n\\n def build(\\n self,\\n vectorstore_info: VectorStoreInfo,\\n llm: 
BaseLanguageModel,\\n ) -> Union[Tool, VectorStoreToolkit]:\\n return VectorStoreToolkit(vectorstore_info=vectorstore_info, llm=llm)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Toolkit for interacting with a Vector Store.\",\"base_classes\":[\"Runnable\",\"BaseToolkit\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\",\"VectorStoreToolkit\"],\"display_name\":\"VectorStoreToolkit\",\"documentation\":\"\",\"custom_fields\":{\"vectorstore_info\":null,\"llm\":null},\"output_types\":[\"Tool\",\"VectorStoreToolkit\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"Metaphor\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Union\\n\\nfrom langchain.agents import tool\\nfrom langchain.agents.agent_toolkits.base import BaseToolkit\\nfrom langchain.tools import Tool\\nfrom metaphor_python import Metaphor # type: ignore\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass MetaphorToolkit(CustomComponent):\\n display_name: str = \\\"Metaphor\\\"\\n description: str = \\\"Metaphor Toolkit\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/tools/metaphor_search\\\"\\n beta: bool = True\\n # api key should be password = True\\n field_config = {\\n \\\"metaphor_api_key\\\": {\\\"display_name\\\": \\\"Metaphor API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n metaphor_api_key: str,\\n use_autoprompt: bool = True,\\n search_num_results: int = 5,\\n similar_num_results: int = 5,\\n ) -> Union[Tool, BaseToolkit]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n client = Metaphor(api_key=metaphor_api_key)\\n\\n @tool\\n def search(query: 
str):\\n \\\"\\\"\\\"Call search engine with a query.\\\"\\\"\\\"\\n return client.search(query, use_autoprompt=use_autoprompt, num_results=search_num_results)\\n\\n @tool\\n def get_contents(ids: List[str]):\\n \\\"\\\"\\\"Get contents of a webpage.\\n\\n The ids passed in should be a list of ids as fetched from `search`.\\n \\\"\\\"\\\"\\n return client.get_contents(ids)\\n\\n @tool\\n def find_similar(url: str):\\n \\\"\\\"\\\"Get search results similar to a given URL.\\n\\n The url passed in should be a URL returned from `search`\\n \\\"\\\"\\\"\\n return client.find_similar(url, num_results=similar_num_results)\\n\\n return [search, get_contents, find_similar] # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"metaphor_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"metaphor_api_key\",\"display_name\":\"Metaphor API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"search_num_results\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"similar_num_results\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"similar_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"use_autoprompt\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Metaphor 
Toolkit\",\"base_classes\":[\"Runnable\",\"BaseToolkit\",\"Generic\",\"BaseTool\",\"RunnableSerializable\",\"Tool\",\"Serializable\",\"object\"],\"display_name\":\"Metaphor\",\"documentation\":\"https://python.langchain.com/docs/integrations/tools/metaphor_search\",\"custom_fields\":{\"metaphor_api_key\":null,\"use_autoprompt\":null,\"search_num_results\":null,\"similar_num_results\":null},\"output_types\":[\"Tool\",\"BaseToolkit\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"wrappers\":{\"TextRequestsWrapper\":{\"template\":{\"aiosession\":{\"type\":\"ClientSession\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"auth\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"auth\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer \\\"}\",\"fileTypes\":[],\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"response_content_type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":false,\"multiline\":false,\"value\":\"text\",\"fileTypes\":[],\"password\":false,\"name\":\"response_content_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"TextRequestsWrapper\"},\"description\":\"Lightweight wrapper around requests library, with async 
support.\",\"base_classes\":[\"TextRequestsWrapper\",\"GenericRequestsWrapper\"],\"display_name\":\"TextRequestsWrapper\",\"documentation\":\"\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false}},\"embeddings\":{\"OpenAIEmbeddings\":{\"template\":{\"allowed_special\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"allowed_special\",\"display_name\":\"Allowed Special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"chunk_size\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"client\",\"display_name\":\"Client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Any, Callable, Dict, List, Optional, Union\\n\\nfrom langchain_openai.embeddings.base import OpenAIEmbeddings\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import NestedDict\\nfrom pydantic.v1.types import SecretStr\\n\\n\\nclass OpenAIEmbeddingsComponent(CustomComponent):\\n display_name = \\\"OpenAIEmbeddings\\\"\\n description = \\\"OpenAI embedding models\\\"\\n\\n def build_config(self):\\n return {\\n \\\"allowed_special\\\": {\\n \\\"display_name\\\": \\\"Allowed Special\\\",\\n \\\"advanced\\\": True,\\n \\\"field_type\\\": \\\"str\\\",\\n 
\\\"is_list\\\": True,\\n },\\n \\\"default_headers\\\": {\\n \\\"display_name\\\": \\\"Default Headers\\\",\\n \\\"advanced\\\": True,\\n \\\"field_type\\\": \\\"dict\\\",\\n },\\n \\\"default_query\\\": {\\n \\\"display_name\\\": \\\"Default Query\\\",\\n \\\"advanced\\\": True,\\n \\\"field_type\\\": \\\"NestedDict\\\",\\n },\\n \\\"disallowed_special\\\": {\\n \\\"display_name\\\": \\\"Disallowed Special\\\",\\n \\\"advanced\\\": True,\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\\"display_name\\\": \\\"Chunk Size\\\", \\\"advanced\\\": True},\\n \\\"client\\\": {\\\"display_name\\\": \\\"Client\\\", \\\"advanced\\\": True},\\n \\\"deployment\\\": {\\\"display_name\\\": \\\"Deployment\\\", \\\"advanced\\\": True},\\n \\\"embedding_ctx_length\\\": {\\n \\\"display_name\\\": \\\"Embedding Context Length\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"max_retries\\\": {\\\"display_name\\\": \\\"Max Retries\\\", \\\"advanced\\\": True},\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model\\\",\\n \\\"advanced\\\": False,\\n \\\"options\\\": [\\\"text-embedding-3-small\\\", \\\"text-embedding-3-large\\\", \\\"text-embedding-ada-002\\\"],\\n },\\n \\\"model_kwargs\\\": {\\\"display_name\\\": \\\"Model Kwargs\\\", \\\"advanced\\\": True},\\n \\\"openai_api_base\\\": {\\\"display_name\\\": \\\"OpenAI API Base\\\", \\\"password\\\": True, \\\"advanced\\\": True},\\n \\\"openai_api_key\\\": {\\\"display_name\\\": \\\"OpenAI API Key\\\", \\\"password\\\": True},\\n \\\"openai_api_type\\\": {\\\"display_name\\\": \\\"OpenAI API Type\\\", \\\"advanced\\\": True, \\\"password\\\": True},\\n \\\"openai_api_version\\\": {\\n \\\"display_name\\\": \\\"OpenAI API Version\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"openai_organization\\\": {\\n \\\"display_name\\\": \\\"OpenAI Organization\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"openai_proxy\\\": {\\\"display_name\\\": \\\"OpenAI Proxy\\\", \\\"advanced\\\": True},\\n 
\\\"request_timeout\\\": {\\\"display_name\\\": \\\"Request Timeout\\\", \\\"advanced\\\": True},\\n \\\"show_progress_bar\\\": {\\n \\\"display_name\\\": \\\"Show Progress Bar\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"skip_empty\\\": {\\\"display_name\\\": \\\"Skip Empty\\\", \\\"advanced\\\": True},\\n \\\"tiktoken_model_name\\\": {\\\"display_name\\\": \\\"TikToken Model Name\\\"},\\n \\\"tikToken_enable\\\": {\\\"display_name\\\": \\\"TikToken Enable\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n default_headers: Optional[Dict[str, str]] = None,\\n default_query: Optional[NestedDict] = {},\\n allowed_special: List[str] = [],\\n disallowed_special: List[str] = [\\\"all\\\"],\\n chunk_size: int = 1000,\\n client: Optional[Any] = None,\\n deployment: str = \\\"text-embedding-3-small\\\",\\n embedding_ctx_length: int = 8191,\\n max_retries: int = 6,\\n model: str = \\\"text-embedding-3-small\\\",\\n model_kwargs: NestedDict = {},\\n openai_api_base: Optional[str] = None,\\n openai_api_key: Optional[str] = \\\"\\\",\\n openai_api_type: Optional[str] = None,\\n openai_api_version: Optional[str] = None,\\n openai_organization: Optional[str] = None,\\n openai_proxy: Optional[str] = None,\\n request_timeout: Optional[float] = None,\\n show_progress_bar: bool = False,\\n skip_empty: bool = False,\\n tiktoken_enable: bool = True,\\n tiktoken_model_name: Optional[str] = None,\\n ) -> Union[OpenAIEmbeddings, Callable]:\\n # This is to avoid errors with Vector Stores (e.g Chroma)\\n if disallowed_special == [\\\"all\\\"]:\\n disallowed_special = \\\"all\\\" # type: ignore\\n\\n api_key = SecretStr(openai_api_key) if openai_api_key else None\\n\\n return OpenAIEmbeddings(\\n tiktoken_enabled=tiktoken_enable,\\n default_headers=default_headers,\\n default_query=default_query,\\n allowed_special=set(allowed_special),\\n disallowed_special=\\\"all\\\",\\n chunk_size=chunk_size,\\n client=client,\\n deployment=deployment,\\n 
embedding_ctx_length=embedding_ctx_length,\\n max_retries=max_retries,\\n model=model,\\n model_kwargs=model_kwargs,\\n base_url=openai_api_base,\\n api_key=api_key,\\n openai_api_type=openai_api_type,\\n api_version=openai_api_version,\\n organization=openai_organization,\\n openai_proxy=openai_proxy,\\n timeout=request_timeout,\\n show_progress_bar=show_progress_bar,\\n skip_empty=skip_empty,\\n tiktoken_model_name=tiktoken_model_name,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"default_headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"default_headers\",\"display_name\":\"Default Headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"default_query\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"default_query\",\"display_name\":\"Default Query\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"deployment\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text-embedding-3-small\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"deployment\",\"display_name\":\"Deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"disallowed_special\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[\"all\"],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"disallowed_special\",\"display_name\":\"Disallowed 
Special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"embedding_ctx_length\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":8191,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding_ctx_length\",\"display_name\":\"Embedding Context Length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_retries\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_retries\",\"display_name\":\"Max Retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"text-embedding-3-small\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"text-embedding-3-small\",\"text-embedding-3-large\",\"text-embedding-ada-002\"],\"name\":\"model\",\"display_name\":\"Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_kwargs\":{\"type\":\"NestedDict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API 
Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_version\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_organization\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_proxy\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"request_timeout\",\"display_name\":\"Request Timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"show_progress_bar\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"show_progress_bar\",\"display_name\":\"Show Progress Bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"skip_empty\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"skip_empty\",\"display_name\":\"Skip Empty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tiktoken_enable\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tiktoken_enable\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"tiktoken_model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tiktoken_model_name\",\"display_name\":\"TikToken Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"OpenAI embedding 
models\",\"base_classes\":[\"Embeddings\",\"OpenAIEmbeddings\",\"Callable\"],\"display_name\":\"OpenAIEmbeddings\",\"documentation\":\"\",\"custom_fields\":{\"default_headers\":null,\"default_query\":null,\"allowed_special\":null,\"disallowed_special\":null,\"chunk_size\":null,\"client\":null,\"deployment\":null,\"embedding_ctx_length\":null,\"max_retries\":null,\"model\":null,\"model_kwargs\":null,\"openai_api_base\":null,\"openai_api_key\":null,\"openai_api_type\":null,\"openai_api_version\":null,\"openai_organization\":null,\"openai_proxy\":null,\"request_timeout\":null,\"show_progress_bar\":null,\"skip_empty\":null,\"tiktoken_enable\":null,\"tiktoken_model_name\":null},\"output_types\":[\"OpenAIEmbeddings\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"CohereEmbeddings\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.embeddings.cohere import CohereEmbeddings\\nfrom langflow import CustomComponent\\n\\n\\nclass CohereEmbeddingsComponent(CustomComponent):\\n display_name = \\\"CohereEmbeddings\\\"\\n description = \\\"Cohere embedding models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"cohere_api_key\\\": {\\\"display_name\\\": \\\"Cohere API Key\\\", \\\"password\\\": True},\\n \\\"model\\\": {\\\"display_name\\\": \\\"Model\\\", \\\"default\\\": \\\"embed-english-v2.0\\\", \\\"advanced\\\": True},\\n \\\"truncate\\\": {\\\"display_name\\\": \\\"Truncate\\\", \\\"advanced\\\": True},\\n \\\"max_retries\\\": {\\\"display_name\\\": \\\"Max Retries\\\", \\\"advanced\\\": True},\\n \\\"user_agent\\\": {\\\"display_name\\\": \\\"User Agent\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n request_timeout: Optional[float] = None,\\n cohere_api_key: str = \\\"\\\",\\n max_retries: Optional[int] = None,\\n model: str = \\\"embed-english-v2.0\\\",\\n truncate: 
Optional[str] = None,\\n user_agent: str = \\\"langchain\\\",\\n ) -> CohereEmbeddings:\\n return CohereEmbeddings( # type: ignore\\n max_retries=max_retries,\\n user_agent=user_agent,\\n request_timeout=request_timeout,\\n cohere_api_key=cohere_api_key,\\n model=model,\\n truncate=truncate,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"cohere_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"cohere_api_key\",\"display_name\":\"Cohere API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_retries\",\"display_name\":\"Max 
Retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"embed-english-v2.0\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"request_timeout\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"truncate\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"truncate\",\"display_name\":\"Truncate\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"user_agent\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"user_agent\",\"display_name\":\"User Agent\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Cohere embedding 
models.\",\"base_classes\":[\"Embeddings\",\"CohereEmbeddings\"],\"display_name\":\"CohereEmbeddings\",\"documentation\":\"\",\"custom_fields\":{\"request_timeout\":null,\"cohere_api_key\":null,\"max_retries\":null,\"model\":null,\"truncate\":null,\"user_agent\":null},\"output_types\":[\"CohereEmbeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"HuggingFaceEmbeddings\":{\"template\":{\"cache_folder\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache_folder\",\"display_name\":\"Cache Folder\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Optional, Dict\\nfrom langchain_community.embeddings.huggingface import HuggingFaceEmbeddings\\n\\n\\nclass HuggingFaceEmbeddingsComponent(CustomComponent):\\n display_name = \\\"HuggingFaceEmbeddings\\\"\\n description = \\\"HuggingFace sentence_transformers embedding models.\\\"\\n documentation = (\\n \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"cache_folder\\\": {\\\"display_name\\\": \\\"Cache Folder\\\", \\\"advanced\\\": True},\\n \\\"encode_kwargs\\\": {\\\"display_name\\\": \\\"Encode Kwargs\\\", \\\"advanced\\\": True, \\\"field_type\\\": \\\"dict\\\"},\\n \\\"model_kwargs\\\": {\\\"display_name\\\": \\\"Model Kwargs\\\", \\\"field_type\\\": \\\"dict\\\", \\\"advanced\\\": True},\\n \\\"model_name\\\": {\\\"display_name\\\": \\\"Model Name\\\"},\\n \\\"multi_process\\\": {\\\"display_name\\\": \\\"Multi Process\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n cache_folder: Optional[str] = None,\\n 
encode_kwargs: Optional[Dict] = {},\\n model_kwargs: Optional[Dict] = {},\\n model_name: str = \\\"sentence-transformers/all-mpnet-base-v2\\\",\\n multi_process: bool = False,\\n ) -> HuggingFaceEmbeddings:\\n return HuggingFaceEmbeddings(\\n cache_folder=cache_folder,\\n encode_kwargs=encode_kwargs,\\n model_kwargs=model_kwargs,\\n model_name=model_name,\\n multi_process=multi_process,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"encode_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"encode_kwargs\",\"display_name\":\"Encode Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"sentence-transformers/all-mpnet-base-v2\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"multi_process\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"multi_process\",\"display_name\":\"Multi Process\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"HuggingFace sentence_transformers 
embedding models.\",\"base_classes\":[\"Embeddings\",\"HuggingFaceEmbeddings\"],\"display_name\":\"HuggingFaceEmbeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers\",\"custom_fields\":{\"cache_folder\":null,\"encode_kwargs\":null,\"model_kwargs\":null,\"model_name\":null,\"multi_process\":null},\"output_types\":[\"HuggingFaceEmbeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VertexAIEmbeddings\":{\"template\":{\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"credentials\",\"display_name\":\"Credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain_community.embeddings import VertexAIEmbeddings\\nfrom typing import Optional, List\\n\\n\\nclass VertexAIEmbeddingsComponent(CustomComponent):\\n display_name = \\\"VertexAIEmbeddings\\\"\\n description = \\\"Google Cloud VertexAI embedding models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"credentials\\\": {\\n \\\"display_name\\\": \\\"Credentials\\\",\\n \\\"value\\\": \\\"\\\",\\n \\\"file_types\\\": [\\\".json\\\"],\\n \\\"field_type\\\": \\\"file\\\",\\n },\\n \\\"instance\\\": {\\n \\\"display_name\\\": \\\"instance\\\",\\n \\\"advanced\\\": True,\\n \\\"field_type\\\": \\\"dict\\\",\\n },\\n \\\"location\\\": {\\n \\\"display_name\\\": \\\"Location\\\",\\n \\\"value\\\": \\\"us-central1\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"max_output_tokens\\\": {\\\"display_name\\\": \\\"Max Output Tokens\\\", \\\"value\\\": 128},\\n \\\"max_retries\\\": {\\n \\\"display_name\\\": \\\"Max Retries\\\",\\n \\\"value\\\": 6,\\n 
\\\"advanced\\\": True,\\n },\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": \\\"textembedding-gecko\\\",\\n },\\n \\\"n\\\": {\\\"display_name\\\": \\\"N\\\", \\\"value\\\": 1, \\\"advanced\\\": True},\\n \\\"project\\\": {\\\"display_name\\\": \\\"Project\\\", \\\"advanced\\\": True},\\n \\\"request_parallelism\\\": {\\n \\\"display_name\\\": \\\"Request Parallelism\\\",\\n \\\"value\\\": 5,\\n \\\"advanced\\\": True,\\n },\\n \\\"stop\\\": {\\\"display_name\\\": \\\"Stop\\\", \\\"advanced\\\": True},\\n \\\"streaming\\\": {\\n \\\"display_name\\\": \\\"Streaming\\\",\\n \\\"value\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"temperature\\\": {\\\"display_name\\\": \\\"Temperature\\\", \\\"value\\\": 0.0},\\n \\\"top_k\\\": {\\\"display_name\\\": \\\"Top K\\\", \\\"value\\\": 40, \\\"advanced\\\": True},\\n \\\"top_p\\\": {\\\"display_name\\\": \\\"Top P\\\", \\\"value\\\": 0.95, \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n instance: Optional[str] = None,\\n credentials: Optional[str] = None,\\n location: str = \\\"us-central1\\\",\\n max_output_tokens: int = 128,\\n max_retries: int = 6,\\n model_name: str = \\\"textembedding-gecko\\\",\\n n: int = 1,\\n project: Optional[str] = None,\\n request_parallelism: int = 5,\\n stop: Optional[List[str]] = None,\\n streaming: bool = False,\\n temperature: float = 0.0,\\n top_k: int = 40,\\n top_p: float = 0.95,\\n ) -> VertexAIEmbeddings:\\n return VertexAIEmbeddings(\\n instance=instance,\\n credentials=credentials,\\n location=location,\\n max_output_tokens=max_output_tokens,\\n max_retries=max_retries,\\n model_name=model_name,\\n n=n,\\n project=project,\\n request_parallelism=request_parallelism,\\n stop=stop,\\n streaming=streaming,\\n temperature=temperature,\\n top_k=top_k,\\n top_p=top_p,\\n 
)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"instance\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"instance\",\"display_name\":\"instance\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"location\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"location\",\"display_name\":\"Location\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_output_tokens\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_output_tokens\",\"display_name\":\"Max Output Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_retries\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_retries\",\"display_name\":\"Max Retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"textembedding-gecko\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_name\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"n\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n\",\"display_name\":\"N\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"project\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"project\",\"display_name\":\"Project\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"request_parallelism\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"request_parallelism\",\"display_name\":\"Request Parallelism\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"stop\",\"display_name\":\"Stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"streaming\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"ran
geSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Google Cloud VertexAI embedding models.\",\"base_classes\":[\"_VertexAICommon\",\"Embeddings\",\"_VertexAIBase\",\"VertexAIEmbeddings\"],\"display_name\":\"VertexAIEmbeddings\",\"documentation\":\"\",\"custom_fields\":{\"instance\":null,\"credentials\":null,\"location\":null,\"max_output_tokens\":null,\"max_retries\":null,\"model_name\":null,\"n\":null,\"project\":null,\"request_parallelism\":null,\"stop\":null,\"streaming\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null},\"output_types\":[\"VertexAIEmbeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"OllamaEmbeddings\":{\"template\":{\"base_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"http://localhost:11434\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"base_url\",\"display_name\":\"Ollama Base URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langflow import CustomComponent\\nfrom 
langchain.embeddings.base import Embeddings\\nfrom langchain_community.embeddings import OllamaEmbeddings\\n\\n\\nclass OllamaEmbeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Ollama.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Ollama Embeddings\\\"\\n description: str = \\\"Embeddings model from Ollama.\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/text_embedding/ollama\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Ollama Model\\\",\\n },\\n \\\"base_url\\\": {\\\"display_name\\\": \\\"Ollama Base URL\\\"},\\n \\\"temperature\\\": {\\\"display_name\\\": \\\"Model Temperature\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"llama2\\\",\\n base_url: str = \\\"http://localhost:11434\\\",\\n temperature: Optional[float] = None,\\n ) -> Embeddings:\\n try:\\n output = OllamaEmbeddings(model=model, base_url=base_url, temperature=temperature) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Ollama API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"llama2\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Ollama Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Model 
Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Embeddings model from Ollama.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Ollama Embeddings\",\"documentation\":\"https://python.langchain.com/docs/integrations/text_embedding/ollama\",\"custom_fields\":{\"model\":null,\"base_url\":null,\"temperature\":null},\"output_types\":[\"Embeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AmazonBedrockEmbeddings\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.embeddings.base import Embeddings\\nfrom langchain_community.embeddings import BedrockEmbeddings\\n\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockEmeddingsComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing an Embeddings Model using Amazon Bedrock.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Amazon Bedrock Embeddings\\\"\\n description: str = \\\"Embeddings model from Amazon Bedrock.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\\"amazon.titan-embed-text-v1\\\"],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Bedrock Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"AWS Region\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"amazon.titan-embed-text-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n endpoint_url: Optional[str] = None,\\n region_name: 
Optional[str] = None,\\n ) -> Embeddings:\\n try:\\n output = BedrockEmbeddings(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n endpoint_url=endpoint_url,\\n region_name=region_name,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"endpoint_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Bedrock Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"amazon.titan-embed-text-v1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"amazon.titan-embed-text-v1\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"region_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"region_name\",\"display_name\":\"AWS 
Region\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Embeddings model from Amazon Bedrock.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"Amazon Bedrock Embeddings\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/bedrock\",\"custom_fields\":{\"model_id\":null,\"credentials_profile_name\":null,\"endpoint_url\":null,\"region_name\":null},\"output_types\":[\"Embeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AzureOpenAIEmbeddings\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"api_version\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"2023-08-01-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"2022-12-01\",\"2023-03-15-preview\",\"2023-05-15\",\"2023-06-01-preview\",\"2023-07-01-preview\",\"2023-08-01-preview\"],\"name\":\"api_version\",\"display_name\":\"API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"azure_deployment\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"azure_deployment\",\"display_name\":\"Deployment 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"azure_endpoint\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"azure_endpoint\",\"display_name\":\"Azure Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Your Azure endpoint, including the resource.. Example: `https://example-resource.azure.openai.com/`\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain.embeddings.base import Embeddings\\nfrom langchain_community.embeddings import AzureOpenAIEmbeddings\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AzureOpenAIEmbeddingsComponent(CustomComponent):\\n display_name: str = \\\"AzureOpenAIEmbeddings\\\"\\n description: str = \\\"Embeddings model from Azure OpenAI.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/integrations/text_embedding/azureopenai\\\"\\n beta = False\\n\\n API_VERSION_OPTIONS = [\\n \\\"2022-12-01\\\",\\n \\\"2023-03-15-preview\\\",\\n \\\"2023-05-15\\\",\\n \\\"2023-06-01-preview\\\",\\n \\\"2023-07-01-preview\\\",\\n \\\"2023-08-01-preview\\\",\\n ]\\n\\n def build_config(self):\\n return {\\n \\\"azure_endpoint\\\": {\\n \\\"display_name\\\": \\\"Azure Endpoint\\\",\\n \\\"required\\\": True,\\n \\\"info\\\": \\\"Your Azure endpoint, including the resource.. 
Example: `https://example-resource.azure.openai.com/`\\\",\\n },\\n \\\"azure_deployment\\\": {\\n \\\"display_name\\\": \\\"Deployment Name\\\",\\n \\\"required\\\": True,\\n },\\n \\\"api_version\\\": {\\n \\\"display_name\\\": \\\"API Version\\\",\\n \\\"options\\\": self.API_VERSION_OPTIONS,\\n \\\"value\\\": self.API_VERSION_OPTIONS[-1],\\n \\\"advanced\\\": True,\\n },\\n \\\"api_key\\\": {\\n \\\"display_name\\\": \\\"API Key\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n azure_endpoint: str,\\n azure_deployment: str,\\n api_version: str,\\n api_key: str,\\n ) -> Embeddings:\\n try:\\n embeddings = AzureOpenAIEmbeddings(\\n azure_endpoint=azure_endpoint,\\n azure_deployment=azure_deployment,\\n api_version=api_version,\\n api_key=api_key,\\n )\\n\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AzureOpenAIEmbeddings API.\\\") from e\\n\\n return embeddings\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Embeddings model from Azure OpenAI.\",\"base_classes\":[\"Embeddings\"],\"display_name\":\"AzureOpenAIEmbeddings\",\"documentation\":\"https://python.langchain.com/docs/integrations/text_embedding/azureopenai\",\"custom_fields\":{\"azure_endpoint\":null,\"azure_deployment\":null,\"api_version\":null,\"api_key\":null},\"output_types\":[\"Embeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"HuggingFaceInferenceAPIEmbeddings\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API 
Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"api_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"http://localhost:8080\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"api_url\",\"display_name\":\"API URL\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"cache_folder\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache_folder\",\"display_name\":\"Cache Folder\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Dict, Optional\\n\\nfrom langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings\\nfrom langflow import CustomComponent\\nfrom pydantic.v1.types import SecretStr\\n\\n\\nclass HuggingFaceInferenceAPIEmbeddingsComponent(CustomComponent):\\n display_name = \\\"HuggingFaceInferenceAPIEmbeddings\\\"\\n description = \\\"HuggingFace sentence_transformers embedding models, API version.\\\"\\n documentation = \\\"https://github.com/huggingface/text-embeddings-inference\\\"\\n\\n def build_config(self):\\n return {\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"password\\\": True, \\\"advanced\\\": True},\\n \\\"api_url\\\": {\\\"display_name\\\": \\\"API URL\\\", \\\"advanced\\\": True},\\n \\\"model_name\\\": {\\\"display_name\\\": \\\"Model Name\\\"},\\n \\\"cache_folder\\\": {\\\"display_name\\\": \\\"Cache Folder\\\", \\\"advanced\\\": True},\\n \\\"encode_kwargs\\\": {\\\"display_name\\\": \\\"Encode Kwargs\\\", \\\"advanced\\\": True, \\\"field_type\\\": \\\"dict\\\"},\\n \\\"model_kwargs\\\": 
{\\\"display_name\\\": \\\"Model Kwargs\\\", \\\"field_type\\\": \\\"dict\\\", \\\"advanced\\\": True},\\n \\\"multi_process\\\": {\\\"display_name\\\": \\\"Multi Process\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n api_key: Optional[str] = \\\"\\\",\\n api_url: str = \\\"http://localhost:8080\\\",\\n model_name: str = \\\"BAAI/bge-large-en-v1.5\\\",\\n cache_folder: Optional[str] = None,\\n encode_kwargs: Optional[Dict] = {},\\n model_kwargs: Optional[Dict] = {},\\n multi_process: bool = False,\\n ) -> HuggingFaceInferenceAPIEmbeddings:\\n if api_key:\\n secret_api_key = SecretStr(api_key)\\n else:\\n raise ValueError(\\\"API Key is required\\\")\\n return HuggingFaceInferenceAPIEmbeddings(\\n api_key=secret_api_key,\\n api_url=api_url,\\n model_name=model_name,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"encode_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"encode_kwargs\",\"display_name\":\"Encode Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"BAAI/bge-large-en-v1.5\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_name\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"multi_process\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"multi_process\",\"display_name\":\"Multi Process\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"HuggingFace sentence_transformers embedding models, API version.\",\"base_classes\":[\"Embeddings\",\"HuggingFaceInferenceAPIEmbeddings\"],\"display_name\":\"HuggingFaceInferenceAPIEmbeddings\",\"documentation\":\"https://github.com/huggingface/text-embeddings-inference\",\"custom_fields\":{\"api_key\":null,\"api_url\":null,\"model_name\":null,\"cache_folder\":null,\"encode_kwargs\":null,\"model_kwargs\":null,\"multi_process\":null},\"output_types\":[\"HuggingFaceInferenceAPIEmbeddings\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"documentloaders\":{\"AZLyricsLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"AZLyricsLoader\"},\"description\":\"Load `AZLyrics` 
webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"AZLyricsLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"AirbyteJSONLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"BSHTMLLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".html\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"BSHTMLLoader\"},\"description\":\"Load `HTML` files and parse them with `beautiful 
soup`.\",\"base_classes\":[\"Document\"],\"display_name\":\"BSHTMLLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"CSVLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".csv\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"CoNLLULoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".csv\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CoNLLULoader\"},\"description\":\"Load 
`CoNLL-U` files.\",\"base_classes\":[\"Document\"],\"display_name\":\"CoNLLULoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"CollegeConfidentialLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CollegeConfidentialLoader\"},\"description\":\"Load `College Confidential` 
webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"CollegeConfidentialLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"DirectoryLoader\":{\"template\":{\"glob\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"**/*.txt\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"glob\",\"display_name\":\"glob\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"load_hidden\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"False\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"load_hidden\",\"display_name\":\"Load hidden files\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_concurrency\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_concurrency\",\"display_name\":\"Max concurrency\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local 
directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"recursive\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"True\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"recursive\",\"display_name\":\"Recursive\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"silent_errors\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"False\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"silent_errors\",\"display_name\":\"Silent errors\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"use_multithreading\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"True\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_multithreading\",\"display_name\":\"Use multithreading\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"DirectoryLoader\"},\"description\":\"Load from a 
directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"DirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/file_directory\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"EverNoteLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".xml\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"EverNoteLoader\"},\"description\":\"Load from 
`EverNote`.\",\"base_classes\":[\"Document\"],\"display_name\":\"EverNoteLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"FacebookChatLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"FacebookChatLoader\"},\"description\":\"Load `Facebook Chat` messages directory dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"FacebookChatLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"GitLoader\":{\"template\":{\"branch\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"branch\",\"display_name\":\"Branch\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"clone_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"clone_url\",\"display_name\":\"Clone 
URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"file_filter\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"file_filter\",\"display_name\":\"File extensions (comma-separated)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"repo_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repo_path\",\"display_name\":\"Path to repository\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"GitLoader\"},\"description\":\"Load `Git` repository 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/git\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"GitbookLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_page\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_page\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"GitbookLoader\"},\"description\":\"Load `GitBook` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitbookLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gitbook\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"GutenbergLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web 
Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"GutenbergLoader\"},\"description\":\"Load from `Gutenberg.org`.\",\"base_classes\":[\"Document\"],\"display_name\":\"GutenbergLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gutenberg\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"HNLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"HNLoader\"},\"description\":\"Load `Hacker News` 
data.\",\"base_classes\":[\"Document\"],\"display_name\":\"HNLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/hacker_news\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"IFixitLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"IFixitLoader\"},\"description\":\"Load `iFixit` repair guides, device wikis and answers.\",\"base_classes\":[\"Document\"],\"display_name\":\"IFixitLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"IMSDbLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web 
Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"IMSDbLoader\"},\"description\":\"Load `IMSDb` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"IMSDbLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/imsdb\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"NotionDirectoryLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"NotionDirectoryLoader\"},\"description\":\"Load `Notion directory` 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"NotionDirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/notion\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"PyPDFLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".pdf\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"PyPDFLoader\"},\"description\":\"Load PDF using pypdf into list of documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"PyPDFDirectoryLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local 
directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"PyPDFDirectoryLoader\"},\"description\":\"Load a directory with `PDF` files using `pypdf` and chunks at character level.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFDirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"ReadTheDocsLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"ReadTheDocsLoader\"},\"description\":\"Load `ReadTheDocs` documentation 
directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"ReadTheDocsLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/readthedocs_documentation\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"SRTLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".srt\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"SRTLoader\"},\"description\":\"Load `.srt` (subtitle) files.\",\"base_classes\":[\"Document\"],\"display_name\":\"SRTLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"SlackDirectoryLoader\":{\"template\":{\"zip_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".zip\"],\"file_path\":\"\",\"password\":false,\"name\":\"zip_path\",\"display_name\":\"Path to zip 
file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"workspace_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"workspace_url\",\"display_name\":\"Workspace URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"SlackDirectoryLoader\"},\"description\":\"Load from a `Slack` directory dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"SlackDirectoryLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"TextLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".txt\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"TextLoader\"},\"description\":\"Load text 
file.\",\"base_classes\":[\"Document\"],\"display_name\":\"TextLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"UnstructuredEmailLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".eml\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"UnstructuredEmailLoader\"},\"description\":\"Load email files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredEmailLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/email\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"UnstructuredHTMLLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".html\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"UnstructuredHTMLLoader\"},\"description\":\"Load `HTML` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredHTMLLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"UnstructuredMarkdownLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".md\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"UnstructuredMarkdownLoader\"},\"description\":\"Load `Markdown` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredMarkdownLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/markdown\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"UnstructuredPowerPointLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".pptx\",\".ppt\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"UnstructuredPowerPointLoader\"},\"description\":\"Load `Microsoft PowerPoint` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredPowerPointLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_powerpoint\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"UnstructuredWordDocumentLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[\".docx\",\".doc\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"UnstructuredWordDocumentLoader\"},\"description\":\"Load `Microsoft Word` file using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredWordDocumentLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"WebBaseLoader\":{\"template\":{\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"WebBaseLoader\"},\"description\":\"Load HTML pages using `urllib` and parse them with `BeautifulSoup'.\",\"base_classes\":[\"Document\"],\"display_name\":\"WebBaseLoader\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"FileLoader\":{\"template\":{\"file_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".json\",\".txt\",\".csv\",\".jsonl\",\".html\",\".htm\",\".conllu\",\".enex\",\".msg\",\".pdf\",\".srt\",\".eml\",\".md\",\".mdx\",\".pptx\",\".docx\"],\"file_path\":\"\",\"password\":false,\"name\":\"file_path\",\"display_name\":\"File 
Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_core.documents import Document\\n\\nfrom langflow import CustomComponent\\nfrom langflow.utils.constants import LOADERS_INFO\\n\\n\\nclass FileLoaderComponent(CustomComponent):\\n display_name: str = \\\"File Loader\\\"\\n description: str = \\\"Generic File Loader\\\"\\n beta = True\\n\\n def build_config(self):\\n loader_options = [\\\"Automatic\\\"] + [loader_info[\\\"name\\\"] for loader_info in LOADERS_INFO]\\n\\n file_types = []\\n suffixes = []\\n\\n for loader_info in LOADERS_INFO:\\n if \\\"allowedTypes\\\" in loader_info:\\n file_types.extend(loader_info[\\\"allowedTypes\\\"])\\n suffixes.extend([f\\\".{ext}\\\" for ext in loader_info[\\\"allowedTypes\\\"]])\\n\\n return {\\n \\\"file_path\\\": {\\n \\\"display_name\\\": \\\"File Path\\\",\\n \\\"required\\\": True,\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\n \\\"json\\\",\\n \\\"txt\\\",\\n \\\"csv\\\",\\n \\\"jsonl\\\",\\n \\\"html\\\",\\n \\\"htm\\\",\\n \\\"conllu\\\",\\n \\\"enex\\\",\\n \\\"msg\\\",\\n \\\"pdf\\\",\\n \\\"srt\\\",\\n \\\"eml\\\",\\n \\\"md\\\",\\n \\\"mdx\\\",\\n \\\"pptx\\\",\\n \\\"docx\\\",\\n ],\\n \\\"suffixes\\\": [\\n \\\".json\\\",\\n \\\".txt\\\",\\n \\\".csv\\\",\\n \\\".jsonl\\\",\\n \\\".html\\\",\\n \\\".htm\\\",\\n \\\".conllu\\\",\\n \\\".enex\\\",\\n \\\".msg\\\",\\n \\\".pdf\\\",\\n \\\".srt\\\",\\n \\\".eml\\\",\\n \\\".md\\\",\\n \\\".mdx\\\",\\n \\\".pptx\\\",\\n \\\".docx\\\",\\n ],\\n # \\\"file_types\\\" : file_types,\\n # \\\"suffixes\\\": suffixes,\\n },\\n \\\"loader\\\": {\\n \\\"display_name\\\": \\\"Loader\\\",\\n \\\"is_list\\\": True,\\n \\\"required\\\": True,\\n \\\"options\\\": loader_options,\\n \\\"value\\\": \\\"Automatic\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, 
file_path: str, loader: str) -> Document:\\n file_type = file_path.split(\\\".\\\")[-1]\\n\\n # Map the loader to the correct loader class\\n selected_loader_info = None\\n for loader_info in LOADERS_INFO:\\n if loader_info[\\\"name\\\"] == loader:\\n selected_loader_info = loader_info\\n break\\n\\n if selected_loader_info is None and loader != \\\"Automatic\\\":\\n raise ValueError(f\\\"Loader {loader} not found in the loader info list\\\")\\n\\n if loader == \\\"Automatic\\\":\\n # Determine the loader based on the file type\\n default_loader_info = None\\n for info in LOADERS_INFO:\\n if \\\"defaultFor\\\" in info and file_type in info[\\\"defaultFor\\\"]:\\n default_loader_info = info\\n break\\n\\n if default_loader_info is None:\\n raise ValueError(f\\\"No default loader found for file type: {file_type}\\\")\\n\\n selected_loader_info = default_loader_info\\n if isinstance(selected_loader_info, dict):\\n loader_import: str = selected_loader_info[\\\"import\\\"]\\n else:\\n raise ValueError(f\\\"Loader info for {loader} is not a dict\\\\nLoader info:\\\\n{selected_loader_info}\\\")\\n module_name, class_name = loader_import.rsplit(\\\".\\\", 1)\\n\\n try:\\n # Import the loader class\\n loader_module = __import__(module_name, fromlist=[class_name])\\n loader_instance = getattr(loader_module, class_name)\\n except ImportError as e:\\n raise ValueError(f\\\"Loader {loader} could not be imported\\\\nLoader info:\\\\n{selected_loader_info}\\\") from e\\n\\n result = loader_instance(file_path=file_path)\\n return result.load()\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"loader\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Automatic\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Automatic\",\"Airbyte JSON (.jsonl)\",\"JSON (.json)\",\"BeautifulSoup4 HTML (.html, 
.htm)\",\"CSV (.csv)\",\"CoNLL-U (.conllu)\",\"EverNote (.enex)\",\"Facebook Chat (.json)\",\"Outlook Message (.msg)\",\"PyPDF (.pdf)\",\"Subtitle (.str)\",\"Text (.txt)\",\"Unstructured Email (.eml)\",\"Unstructured HTML (.html, .htm)\",\"Unstructured Markdown (.md)\",\"Unstructured PowerPoint (.pptx)\",\"Unstructured Word (.docx)\"],\"name\":\"loader\",\"display_name\":\"Loader\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Generic File Loader\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"File Loader\",\"documentation\":\"\",\"custom_fields\":{\"file_path\":null,\"loader\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"UrlLoader\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List\\n\\nfrom langchain import document_loaders\\nfrom langchain_core.documents import Document\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass UrlLoaderComponent(CustomComponent):\\n display_name: str = \\\"Url Loader\\\"\\n description: str = \\\"Generic Url Loader Component\\\"\\n\\n def build_config(self):\\n return {\\n \\\"web_path\\\": {\\n \\\"display_name\\\": \\\"Url\\\",\\n \\\"required\\\": True,\\n },\\n \\\"loader\\\": {\\n \\\"display_name\\\": \\\"Loader\\\",\\n \\\"is_list\\\": True,\\n \\\"required\\\": True,\\n \\\"options\\\": [\\n \\\"AZLyricsLoader\\\",\\n \\\"CollegeConfidentialLoader\\\",\\n \\\"GitbookLoader\\\",\\n \\\"HNLoader\\\",\\n \\\"IFixitLoader\\\",\\n \\\"IMSDbLoader\\\",\\n \\\"WebBaseLoader\\\",\\n ],\\n \\\"value\\\": \\\"WebBaseLoader\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, web_path: str, loader: str) -> List[Document]:\\n try:\\n loader_instance = getattr(document_loaders, loader)(web_path=web_path)\\n except Exception 
as e:\\n raise ValueError(f\\\"No loader found for: {web_path}\\\") from e\\n docs = loader_instance.load()\\n avg_length = sum(len(doc.page_content) for doc in docs if hasattr(doc, \\\"page_content\\\")) / len(docs)\\n self.status = f\\\"\\\"\\\"{len(docs)} documents)\\n \\\\nAvg. Document Length (characters): {int(avg_length)}\\n Documents: {docs[:3]}...\\\"\\\"\\\"\\n return docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"loader\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"WebBaseLoader\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"AZLyricsLoader\",\"CollegeConfidentialLoader\",\"GitbookLoader\",\"HNLoader\",\"IFixitLoader\",\"IMSDbLoader\",\"WebBaseLoader\"],\"name\":\"loader\",\"display_name\":\"Loader\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"web_path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Generic Url Loader Component\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"Url Loader\",\"documentation\":\"\",\"custom_fields\":{\"web_path\":null,\"loader\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"GatherRecords\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from concurrent import futures\\nfrom pathlib import Path\\nfrom typing import Any, Dict, List\\n\\nfrom langflow import CustomComponent\\nfrom langflow.schema 
import Record\\n\\n\\nclass GatherRecordsComponent(CustomComponent):\\n display_name = \\\"Gather Records\\\"\\n description = \\\"Gather records from a directory.\\\"\\n\\n def build_config(self) -> Dict[str, Any]:\\n return {\\n \\\"load_hidden\\\": {\\n \\\"display_name\\\": \\\"Load Hidden Files\\\",\\n \\\"value\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"max_concurrency\\\": {\\n \\\"display_name\\\": \\\"Max Concurrency\\\",\\n \\\"value\\\": 10,\\n \\\"advanced\\\": True,\\n },\\n \\\"path\\\": {\\\"display_name\\\": \\\"Local Directory\\\"},\\n \\\"recursive\\\": {\\\"display_name\\\": \\\"Recursive\\\", \\\"value\\\": True, \\\"advanced\\\": True},\\n \\\"use_multithreading\\\": {\\n \\\"display_name\\\": \\\"Use Multithreading\\\",\\n \\\"value\\\": True,\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def is_hidden(self, path: Path) -> bool:\\n return path.name.startswith(\\\".\\\")\\n\\n def retrieve_file_paths(\\n self,\\n path: str,\\n types: List[str],\\n load_hidden: bool,\\n recursive: bool,\\n depth: int,\\n ) -> List[str]:\\n path_obj = Path(path)\\n if not path_obj.exists() or not path_obj.is_dir():\\n raise ValueError(f\\\"Path {path} must exist and be a directory.\\\")\\n\\n def match_types(p: Path) -> bool:\\n return any(p.suffix == f\\\".{t}\\\" for t in types) if types else True\\n\\n def is_not_hidden(p: Path) -> bool:\\n return not self.is_hidden(p) or load_hidden\\n\\n def walk_level(directory: Path, max_depth: int):\\n directory = directory.resolve()\\n prefix_length = len(directory.parts)\\n for p in directory.rglob(\\\"*\\\" if recursive else \\\"[!.]*\\\"):\\n if len(p.parts) - prefix_length <= max_depth:\\n yield p\\n\\n glob = \\\"**/*\\\" if recursive else \\\"*\\\"\\n paths = walk_level(path_obj, depth) if depth else path_obj.glob(glob)\\n file_paths = [str(p) for p in paths if p.is_file() and match_types(p) and is_not_hidden(p)]\\n\\n return file_paths\\n\\n def parse_file_to_record(self, file_path: str, silent_errors: 
bool) -> Record:\\n # Use the partition function to load the file\\n from unstructured.partition.auto import partition\\n\\n try:\\n elements = partition(file_path)\\n except Exception as e:\\n if not silent_errors:\\n raise ValueError(f\\\"Error loading file {file_path}: {e}\\\") from e\\n return None\\n\\n # Create a Record\\n text = \\\"\\\\n\\\\n\\\".join([str(el) for el in elements])\\n metadata = elements.metadata if hasattr(elements, \\\"metadata\\\") else {}\\n metadata[\\\"file_path\\\"] = file_path\\n record = Record(text=text, data=metadata)\\n return record\\n\\n def get_elements(\\n self,\\n file_paths: List[str],\\n silent_errors: bool,\\n max_concurrency: int,\\n use_multithreading: bool,\\n ) -> List[Record]:\\n if use_multithreading:\\n records = self.parallel_load_records(file_paths, silent_errors, max_concurrency)\\n else:\\n records = [self.parse_file_to_record(file_path, silent_errors) for file_path in file_paths]\\n records = list(filter(None, records))\\n return records\\n\\n def parallel_load_records(self, file_paths: List[str], silent_errors: bool, max_concurrency: int) -> List[Record]:\\n with futures.ThreadPoolExecutor(max_workers=max_concurrency) as executor:\\n loaded_files = executor.map(\\n lambda file_path: self.parse_file_to_record(file_path, silent_errors),\\n file_paths,\\n )\\n return loaded_files\\n\\n def build(\\n self,\\n path: str,\\n types: List[str] = None,\\n depth: int = 0,\\n max_concurrency: int = 2,\\n load_hidden: bool = False,\\n recursive: bool = True,\\n silent_errors: bool = False,\\n use_multithreading: bool = True,\\n ) -> List[Record]:\\n resolved_path = self.resolve_path(path)\\n file_paths = self.retrieve_file_paths(resolved_path, types, load_hidden, recursive, depth)\\n loaded_records = []\\n\\n if use_multithreading:\\n loaded_records = self.parallel_load_records(file_paths, silent_errors, max_concurrency)\\n else:\\n loaded_records = [self.parse_file_to_record(file_path, silent_errors) for file_path in 
file_paths]\\n loaded_records = list(filter(None, loaded_records))\\n self.status = loaded_records\\n return loaded_records\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"depth\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"depth\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"load_hidden\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"load_hidden\",\"display_name\":\"Load Hidden Files\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_concurrency\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_concurrency\",\"display_name\":\"Max Concurrency\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"path\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local 
Directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"recursive\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"recursive\",\"display_name\":\"Recursive\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"silent_errors\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"silent_errors\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"types\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"types\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"use_multithreading\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_multithreading\",\"display_name\":\"Use Multithreading\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Gather records from a directory.\",\"base_classes\":[\"Record\"],\"display_name\":\"Gather 
Records\",\"documentation\":\"\",\"custom_fields\":{\"path\":null,\"types\":null,\"depth\":null,\"max_concurrency\":null,\"load_hidden\":null,\"recursive\":null,\"silent_errors\":null,\"use_multithreading\":null},\"output_types\":[\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"textsplitters\":{\"CharacterTextSplitter\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chunk_overlap\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":200,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chunk_size\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List\\n\\nfrom langchain.text_splitter import CharacterTextSplitter\\nfrom langchain_core.documents.base import Document\\nfrom langflow import CustomComponent\\n\\n\\nclass CharacterTextSplitterComponent(CustomComponent):\\n display_name = \\\"CharacterTextSplitter\\\"\\n description = \\\"Splitting text that looks at characters.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"chunk_overlap\\\": {\\\"display_name\\\": \\\"Chunk Overlap\\\", \\\"default\\\": 
200},\\n \\\"chunk_size\\\": {\\\"display_name\\\": \\\"Chunk Size\\\", \\\"default\\\": 1000},\\n \\\"separator\\\": {\\\"display_name\\\": \\\"Separator\\\", \\\"default\\\": \\\"\\\\n\\\"},\\n }\\n\\n def build(\\n self,\\n documents: List[Document],\\n chunk_overlap: int = 200,\\n chunk_size: int = 1000,\\n separator: str = \\\"\\\\n\\\",\\n ) -> List[Document]:\\n # separator may come escaped from the frontend\\n separator = separator.encode().decode(\\\"unicode_escape\\\")\\n docs = CharacterTextSplitter(\\n chunk_overlap=chunk_overlap,\\n chunk_size=chunk_size,\\n separator=separator,\\n ).split_documents(documents)\\n self.status = docs\\n return docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"separator\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\\\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"separator\",\"display_name\":\"Separator\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Splitting text that looks at characters.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"CharacterTextSplitter\",\"documentation\":\"\",\"custom_fields\":{\"documents\":null,\"chunk_overlap\":null,\"chunk_size\":null,\"separator\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"RecursiveCharacterTextSplitter\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to 
split.\",\"title_case\":false},\"chunk_overlap\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":200,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between chunks.\",\"title_case\":false},\"chunk_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_core.documents import Document\\n\\nfrom langflow import CustomComponent\\nfrom langflow.utils.util import build_loader_repr_from_documents\\nfrom langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n\\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Recursive Character Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": 'The characters to split on.\\\\nIf left empty defaults to [\\\"\\\\\\\\n\\\\\\\\n\\\", \\\"\\\\\\\\n\\\", \\\" \\\", \\\"\\\"].',\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each 
chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n \\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n separators: Optional[list[str]] = None,\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n\\n if separators == \\\"\\\":\\n separators = None\\n elif separators:\\n # check if the separators list has escaped characters\\n # if there are escaped characters, unescape them\\n separators = [x.encode().decode(\\\"unicode-escape\\\") for x in separators]\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n splitter = RecursiveCharacterTextSplitter(\\n separators=separators,\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n self.repr_value = build_loader_repr_from_documents(docs)\\n return 
docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"separators\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"separators\",\"display_name\":\"Separators\",\"advanced\":false,\"dynamic\":false,\"info\":\"The characters to split on.\\nIf left empty defaults to [\\\"\\\\n\\\\n\\\", \\\"\\\\n\\\", \\\" \\\", \\\"\\\"].\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Split text into chunks of a specified length.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"Recursive Character Text Splitter\",\"documentation\":\"https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter\",\"custom_fields\":{\"documents\":null,\"separators\":null,\"chunk_size\":null,\"chunk_overlap\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"LanguageRecursiveTextSplitter\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"The documents to split.\",\"title_case\":false},\"chunk_overlap\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":200,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"The amount of overlap between 
chunks.\",\"title_case\":false},\"chunk_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum length of each chunk.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.text_splitter import Language\\nfrom langchain_core.documents import Document\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass LanguageRecursiveTextSplitterComponent(CustomComponent):\\n display_name: str = \\\"Language Recursive Text Splitter\\\"\\n description: str = \\\"Split text into chunks of a specified length based on language.\\\"\\n documentation: str = \\\"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter\\\"\\n\\n def build_config(self):\\n options = [x.value for x in Language]\\n return {\\n \\\"documents\\\": {\\n \\\"display_name\\\": \\\"Documents\\\",\\n \\\"info\\\": \\\"The documents to split.\\\",\\n },\\n \\\"separator_type\\\": {\\n \\\"display_name\\\": \\\"Separator Type\\\",\\n \\\"info\\\": \\\"The type of separator to use.\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"options\\\": options,\\n \\\"value\\\": \\\"Python\\\",\\n },\\n \\\"separators\\\": {\\n \\\"display_name\\\": \\\"Separators\\\",\\n \\\"info\\\": \\\"The characters to split on.\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"chunk_size\\\": {\\n \\\"display_name\\\": \\\"Chunk Size\\\",\\n \\\"info\\\": \\\"The maximum length of each chunk.\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1000,\\n },\\n \\\"chunk_overlap\\\": {\\n \\\"display_name\\\": \\\"Chunk Overlap\\\",\\n \\\"info\\\": \\\"The amount of overlap between chunks.\\\",\\n \\\"field_type\\\": 
\\\"int\\\",\\n \\\"value\\\": 200,\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n documents: list[Document],\\n chunk_size: Optional[int] = 1000,\\n chunk_overlap: Optional[int] = 200,\\n separator_type: str = \\\"Python\\\",\\n ) -> list[Document]:\\n \\\"\\\"\\\"\\n Split text into chunks of a specified length.\\n\\n Args:\\n separators (list[str]): The characters to split on.\\n chunk_size (int): The maximum length of each chunk.\\n chunk_overlap (int): The amount of overlap between chunks.\\n length_function (function): The function to use to calculate the length of the text.\\n\\n Returns:\\n list[str]: The chunks of text.\\n \\\"\\\"\\\"\\n from langchain.text_splitter import RecursiveCharacterTextSplitter\\n\\n # Make sure chunk_size and chunk_overlap are ints\\n if isinstance(chunk_size, str):\\n chunk_size = int(chunk_size)\\n if isinstance(chunk_overlap, str):\\n chunk_overlap = int(chunk_overlap)\\n\\n splitter = RecursiveCharacterTextSplitter.from_language(\\n language=Language(separator_type),\\n chunk_size=chunk_size,\\n chunk_overlap=chunk_overlap,\\n )\\n\\n docs = splitter.split_documents(documents)\\n return docs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"separator_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Python\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"cpp\",\"go\",\"java\",\"kotlin\",\"js\",\"ts\",\"php\",\"proto\",\"python\",\"rst\",\"ruby\",\"rust\",\"scala\",\"swift\",\"markdown\",\"latex\",\"html\",\"sol\",\"csharp\",\"cobol\",\"c\",\"lua\",\"perl\"],\"name\":\"separator_type\",\"display_name\":\"Separator Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"The type of separator to 
use.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Split text into chunks of a specified length based on language.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"Language Recursive Text Splitter\",\"documentation\":\"https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter\",\"custom_fields\":{\"documents\":null,\"chunk_size\":null,\"chunk_overlap\":null,\"separator_type\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"utilities\":{\"BingSearchAPIWrapper\":{\"template\":{\"bing_search_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"bing_search_url\",\"display_name\":\"Bing Search URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"bing_subscription_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"bing_subscription_key\",\"display_name\":\"Bing Subscription Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\n# Assuming `BingSearchAPIWrapper` is a class that exists in the context\\n# and has the appropriate methods and attributes.\\n# We need to make sure this class is importable from the context where this code will be running.\\nfrom langchain_community.utilities.bing_search import BingSearchAPIWrapper\\n\\n\\nclass BingSearchAPIWrapperComponent(CustomComponent):\\n display_name = \\\"BingSearchAPIWrapper\\\"\\n description = \\\"Wrapper for Bing Search API.\\\"\\n\\n def 
build_config(self):\\n return {\\n \\\"bing_search_url\\\": {\\\"display_name\\\": \\\"Bing Search URL\\\"},\\n \\\"bing_subscription_key\\\": {\\n \\\"display_name\\\": \\\"Bing Subscription Key\\\",\\n \\\"password\\\": True,\\n },\\n \\\"k\\\": {\\\"display_name\\\": \\\"Number of results\\\", \\\"advanced\\\": True},\\n # 'k' is not included as it is not shown (show=False)\\n }\\n\\n def build(\\n self,\\n bing_search_url: str,\\n bing_subscription_key: str,\\n k: int = 10,\\n ) -> BingSearchAPIWrapper:\\n # 'k' has a default value and is not shown (show=False), so it is hardcoded here\\n return BingSearchAPIWrapper(bing_search_url=bing_search_url, bing_subscription_key=bing_subscription_key, k=k)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"k\",\"display_name\":\"Number of results\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper for Bing Search API.\",\"base_classes\":[\"BingSearchAPIWrapper\"],\"display_name\":\"BingSearchAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{\"bing_search_url\":null,\"bing_subscription_key\":null,\"k\":null},\"output_types\":[\"BingSearchAPIWrapper\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"GoogleSearchAPIWrapper\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Union\\n\\nfrom langchain_community.utilities.google_search import GoogleSearchAPIWrapper\\nfrom langflow import CustomComponent\\n\\n\\nclass GoogleSearchAPIWrapperComponent(CustomComponent):\\n display_name = \\\"GoogleSearchAPIWrapper\\\"\\n description = 
\\\"Wrapper for Google Search API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"google_api_key\\\": {\\\"display_name\\\": \\\"Google API Key\\\", \\\"password\\\": True},\\n \\\"google_cse_id\\\": {\\\"display_name\\\": \\\"Google CSE ID\\\", \\\"password\\\": True},\\n }\\n\\n def build(\\n self,\\n google_api_key: str,\\n google_cse_id: str,\\n ) -> Union[GoogleSearchAPIWrapper, Callable]:\\n return GoogleSearchAPIWrapper(google_api_key=google_api_key, google_cse_id=google_cse_id) # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"google_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"google_api_key\",\"display_name\":\"Google API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"google_cse_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"google_cse_id\",\"display_name\":\"Google CSE ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper for Google Search API.\",\"base_classes\":[\"GoogleSearchAPIWrapper\",\"Callable\"],\"display_name\":\"GoogleSearchAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{\"google_api_key\":null,\"google_cse_id\":null},\"output_types\":[\"GoogleSearchAPIWrapper\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"GoogleSerperAPIWrapper\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Dict\\n\\n# Assuming the existence of GoogleSerperAPIWrapper class in the 
serper module\\n# If this class does not exist, you would need to create it or import the appropriate class from another module\\nfrom langchain_community.utilities.google_serper import GoogleSerperAPIWrapper\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass GoogleSerperAPIWrapperComponent(CustomComponent):\\n display_name = \\\"GoogleSerperAPIWrapper\\\"\\n description = \\\"Wrapper around the Serper.dev Google Search API.\\\"\\n\\n def build_config(self) -> Dict[str, Dict]:\\n return {\\n \\\"result_key_for_type\\\": {\\n \\\"display_name\\\": \\\"Result Key for Type\\\",\\n \\\"show\\\": True,\\n \\\"multiline\\\": False,\\n \\\"password\\\": False,\\n \\\"advanced\\\": False,\\n \\\"dynamic\\\": False,\\n \\\"info\\\": \\\"\\\",\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"list\\\": False,\\n \\\"value\\\": {\\n \\\"news\\\": \\\"news\\\",\\n \\\"places\\\": \\\"places\\\",\\n \\\"images\\\": \\\"images\\\",\\n \\\"search\\\": \\\"organic\\\",\\n },\\n },\\n \\\"serper_api_key\\\": {\\n \\\"display_name\\\": \\\"Serper API Key\\\",\\n \\\"show\\\": True,\\n \\\"multiline\\\": False,\\n \\\"password\\\": True,\\n \\\"advanced\\\": False,\\n \\\"dynamic\\\": False,\\n \\\"info\\\": \\\"\\\",\\n \\\"type\\\": \\\"str\\\",\\n \\\"list\\\": False,\\n },\\n }\\n\\n def build(\\n self,\\n serper_api_key: str,\\n ) -> GoogleSerperAPIWrapper:\\n return GoogleSerperAPIWrapper(serper_api_key=serper_api_key)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"serper_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"serper_api_key\",\"display_name\":\"Serper API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper around the 
Serper.dev Google Search API.\",\"base_classes\":[\"GoogleSerperAPIWrapper\"],\"display_name\":\"GoogleSerperAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{\"serper_api_key\":null},\"output_types\":[\"GoogleSerperAPIWrapper\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SearxSearchWrapper\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Optional, Dict\\nfrom langchain_community.utilities.searx_search import SearxSearchWrapper\\n\\n\\nclass SearxSearchWrapperComponent(CustomComponent):\\n display_name = \\\"SearxSearchWrapper\\\"\\n description = \\\"Wrapper for Searx API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"headers\\\": {\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"multiline\\\": True,\\n \\\"value\\\": '{\\\"Authorization\\\": \\\"Bearer \\\"}',\\n },\\n \\\"k\\\": {\\\"display_name\\\": \\\"k\\\", \\\"advanced\\\": True, \\\"field_type\\\": \\\"int\\\", \\\"value\\\": 10},\\n \\\"searx_host\\\": {\\n \\\"display_name\\\": \\\"Searx Host\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"value\\\": \\\"https://searx.example.com\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n k: int = 10,\\n headers: Optional[Dict[str, str]] = None,\\n searx_host: str = \\\"https://searx.example.com\\\",\\n ) -> SearxSearchWrapper:\\n return SearxSearchWrapper(headers=headers, k=k, searx_host=searx_host)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"Authorization\\\": \\\"Bearer 
\\\"}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"k\",\"display_name\":\"k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"searx_host\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"https://searx.example.com\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"searx_host\",\"display_name\":\"Searx Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper for Searx API.\",\"base_classes\":[\"SearxSearchWrapper\"],\"display_name\":\"SearxSearchWrapper\",\"documentation\":\"\",\"custom_fields\":{\"k\":null,\"headers\":null,\"searx_host\":null},\"output_types\":[\"SearxSearchWrapper\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SerpAPIWrapper\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Union\\n\\nfrom langchain_community.utilities.serpapi import SerpAPIWrapper\\nfrom langflow import CustomComponent\\n\\n\\nclass SerpAPIWrapperComponent(CustomComponent):\\n display_name = \\\"SerpAPIWrapper\\\"\\n description = \\\"Wrapper around SerpAPI\\\"\\n\\n def build_config(self):\\n return {\\n \\\"serpapi_api_key\\\": {\\\"display_name\\\": \\\"SerpAPI API Key\\\", \\\"type\\\": \\\"str\\\", \\\"password\\\": True},\\n \\\"params\\\": {\\n \\\"display_name\\\": \\\"Parameters\\\",\\n \\\"type\\\": \\\"dict\\\",\\n \\\"advanced\\\": True,\\n \\\"multiline\\\": True,\\n \\\"value\\\": '{\\\"engine\\\": 
\\\"google\\\",\\\"google_domain\\\": \\\"google.com\\\",\\\"gl\\\": \\\"us\\\",\\\"hl\\\": \\\"en\\\"}',\\n },\\n }\\n\\n def build(\\n self,\\n serpapi_api_key: str,\\n params: dict,\\n ) -> Union[SerpAPIWrapper, Callable]: # Removed quotes around SerpAPIWrapper\\n return SerpAPIWrapper( # type: ignore\\n serpapi_api_key=serpapi_api_key,\\n params=params,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"params\":{\"type\":\"dict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"{\\\"engine\\\": \\\"google\\\",\\\"google_domain\\\": \\\"google.com\\\",\\\"gl\\\": \\\"us\\\",\\\"hl\\\": \\\"en\\\"}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"params\",\"display_name\":\"Parameters\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"serpapi_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"serpapi_api_key\",\"display_name\":\"SerpAPI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper around SerpAPI\",\"base_classes\":[\"Callable\",\"SerpAPIWrapper\"],\"display_name\":\"SerpAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{\"serpapi_api_key\":null,\"params\":null},\"output_types\":[\"SerpAPIWrapper\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"WikipediaAPIWrapper\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Union\\n\\nfrom langchain_community.utilities.wikipedia import WikipediaAPIWrapper\\nfrom langflow import CustomComponent\\n\\n# Assuming WikipediaAPIWrapper is 
a class that needs to be imported.\\n# The import statement is not included as it is not provided in the JSON\\n# and the actual implementation details are unknown.\\n\\n\\nclass WikipediaAPIWrapperComponent(CustomComponent):\\n display_name = \\\"WikipediaAPIWrapper\\\"\\n description = \\\"Wrapper around WikipediaAPI.\\\"\\n\\n def build_config(self):\\n return {}\\n\\n def build(\\n self,\\n top_k_results: int = 3,\\n lang: str = \\\"en\\\",\\n load_all_available_meta: bool = False,\\n doc_content_chars_max: int = 4000,\\n ) -> Union[WikipediaAPIWrapper, Callable]:\\n return WikipediaAPIWrapper( # type: ignore\\n top_k_results=top_k_results,\\n lang=lang,\\n load_all_available_meta=load_all_available_meta,\\n doc_content_chars_max=doc_content_chars_max,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"doc_content_chars_max\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":4000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"doc_content_chars_max\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"lang\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"en\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"lang\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"load_all_available_meta\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"load_all_available_meta\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_k_results\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":3,\"fileTypes
\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper around WikipediaAPI.\",\"base_classes\":[\"WikipediaAPIWrapper\",\"Callable\"],\"display_name\":\"WikipediaAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{\"top_k_results\":null,\"lang\":null,\"load_all_available_meta\":null,\"doc_content_chars_max\":null},\"output_types\":[\"WikipediaAPIWrapper\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"WolframAlphaAPIWrapper\":{\"template\":{\"appid\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"appid\",\"display_name\":\"App ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Union\\n\\nfrom langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper\\nfrom langflow import CustomComponent\\n\\n# Since all the fields in the JSON have show=False, we will only create a basic component\\n# without any configurable fields.\\n\\n\\nclass WolframAlphaAPIWrapperComponent(CustomComponent):\\n display_name = \\\"WolframAlphaAPIWrapper\\\"\\n description = \\\"Wrapper for Wolfram Alpha.\\\"\\n\\n def build_config(self):\\n return {\\\"appid\\\": {\\\"display_name\\\": \\\"App ID\\\", \\\"type\\\": \\\"str\\\", \\\"password\\\": True}}\\n\\n def build(self, appid: str) -> Union[Callable, WolframAlphaAPIWrapper]:\\n return WolframAlphaAPIWrapper(wolfram_alpha_appid=appid) # type: 
ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Wrapper for Wolfram Alpha.\",\"base_classes\":[\"WolframAlphaAPIWrapper\",\"Callable\"],\"display_name\":\"WolframAlphaAPIWrapper\",\"documentation\":\"\",\"custom_fields\":{\"appid\":null},\"output_types\":[\"Callable\",\"WolframAlphaAPIWrapper\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"RunnableExecutor\":{\"template\":{\"runnable\":{\"type\":\"Runnable\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"runnable\",\"display_name\":\"Runnable\",\"advanced\":false,\"dynamic\":false,\"info\":\"The runnable to execute.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_core.runnables import Runnable\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass RunnableExecComponent(CustomComponent):\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n display_name = \\\"Runnable Executor\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"input_key\\\": {\\n \\\"display_name\\\": \\\"Input Key\\\",\\n \\\"info\\\": \\\"The key to use for the input.\\\",\\n },\\n \\\"inputs\\\": {\\n \\\"display_name\\\": \\\"Inputs\\\",\\n \\\"info\\\": \\\"The inputs to pass to the runnable.\\\",\\n },\\n \\\"runnable\\\": {\\n \\\"display_name\\\": \\\"Runnable\\\",\\n \\\"info\\\": \\\"The runnable to execute.\\\",\\n },\\n \\\"output_key\\\": {\\n \\\"display_name\\\": \\\"Output Key\\\",\\n \\\"info\\\": \\\"The key to use for the output.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n input_key: str,\\n inputs: str,\\n runnable: Runnable,\\n output_key: str = 
\\\"output\\\",\\n ) -> Text:\\n result = runnable.invoke({input_key: inputs})\\n result = result.get(output_key)\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"input_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"input_key\",\"display_name\":\"Input Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The key to use for the input.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Inputs\",\"advanced\":false,\"dynamic\":false,\"info\":\"The inputs to pass to the runnable.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"output_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"output\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"output_key\",\"display_name\":\"Output Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The key to use for the output.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Runnable 
Executor\",\"documentation\":\"http://docs.langflow.org/components/custom\",\"custom_fields\":{\"input_key\":null,\"inputs\":null,\"runnable\":null,\"output_key\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"DocumentToRecord\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List\\n\\nfrom langchain_core.documents import Document\\n\\nfrom langflow import CustomComponent\\nfrom langflow.schema import Record\\n\\n\\nclass DocumentToRecordComponent(CustomComponent):\\n display_name = \\\"Documents to Records\\\"\\n description = \\\"Convert documents to records.\\\"\\n\\n field_config = {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n }\\n\\n def build(self, documents: List[Document]) -> List[Record]:\\n if isinstance(documents, Document):\\n documents = [documents]\\n records = [Record.from_document(document) for document in documents]\\n self.status = records\\n return records\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Convert documents to records.\",\"base_classes\":[\"Record\"],\"display_name\":\"Documents to Records\",\"documentation\":\"\",\"custom_fields\":{\"documents\":null},\"output_types\":[\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"GetRequest\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing 
import Optional\\n\\nimport requests\\nfrom langchain_core.documents import Document\\nfrom langflow import CustomComponent\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass GetRequest(CustomComponent):\\n display_name: str = \\\"GET Request\\\"\\n description: str = \\\"Make a GET request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#get-request\\\"\\n beta: bool = True\\n field_config = {\\n \\\"url\\\": {\\n \\\"display_name\\\": \\\"URL\\\",\\n \\\"info\\\": \\\"The URL to make the request to\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"timeout\\\": {\\n \\\"display_name\\\": \\\"Timeout\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"The timeout to use for the request.\\\",\\n \\\"value\\\": 5,\\n },\\n }\\n\\n def get_document(self, session: requests.Session, url: str, headers: Optional[dict], timeout: int) -> Document:\\n try:\\n response = session.get(url, headers=headers, timeout=int(timeout))\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except requests.Timeout:\\n return Document(\\n page_content=\\\"Request Timed Out\\\",\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 408},\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n url: str,\\n headers: Optional[dict] = None,\\n 
timeout: int = 5,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n urls = url if isinstance(url, list) else [url]\\n with requests.Session() as session:\\n documents = [self.get_document(session, u, headers, timeout) for u in urls]\\n self.repr_value = documents\\n return documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the request.\",\"title_case\":false},\"timeout\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"The timeout to use for the request.\",\"title_case\":false},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Make a GET request to the given URL.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"GET 
Request\",\"documentation\":\"https://docs.langflow.org/components/utilities#get-request\",\"custom_fields\":{\"url\":null,\"headers\":null,\"timeout\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SQLExecutor\":{\"template\":{\"database\":{\"type\":\"SQLDatabase\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"database\",\"display_name\":\"Database\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"add_error\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"add_error\",\"display_name\":\"Add Error\",\"advanced\":false,\"dynamic\":false,\"info\":\"Add the error to the result.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_community.tools.sql_database.tool import QuerySQLDataBaseTool\\nfrom langchain_experimental.sql.base import SQLDatabase\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass SQLExecutorComponent(CustomComponent):\\n display_name = \\\"SQL Executor\\\"\\n description = \\\"Execute SQL query.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"database\\\": {\\\"display_name\\\": \\\"Database\\\"},\\n \\\"include_columns\\\": {\\n \\\"display_name\\\": \\\"Include Columns\\\",\\n \\\"info\\\": \\\"Include columns in the result.\\\",\\n },\\n \\\"passthrough\\\": {\\n \\\"display_name\\\": \\\"Passthrough\\\",\\n \\\"info\\\": \\\"If an error occurs, return the query instead of raising an exception.\\\",\\n },\\n \\\"add_error\\\": {\\n \\\"display_name\\\": \\\"Add Error\\\",\\n \\\"info\\\": \\\"Add the error to the result.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n 
query: str,\\n database: SQLDatabase,\\n include_columns: bool = False,\\n passthrough: bool = False,\\n add_error: bool = False,\\n ) -> Text:\\n error = None\\n try:\\n tool = QuerySQLDataBaseTool(db=database)\\n result = tool.run(query, include_columns=include_columns)\\n self.status = result\\n except Exception as e:\\n result = str(e)\\n self.status = result\\n if not passthrough:\\n raise e\\n error = repr(e)\\n\\n if add_error and error is not None:\\n result = f\\\"{result}\\\\n\\\\nError: {error}\\\\n\\\\nQuery: {query}\\\"\\n elif error is not None:\\n # Then we won't add the error to the result\\n # but since we are in passthrough mode, we will return the query\\n result = query\\n\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"include_columns\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"include_columns\",\"display_name\":\"Include Columns\",\"advanced\":false,\"dynamic\":false,\"info\":\"Include columns in the result.\",\"title_case\":false},\"passthrough\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"passthrough\",\"display_name\":\"Passthrough\",\"advanced\":false,\"dynamic\":false,\"info\":\"If an error occurs, return the query instead of raising an exception.\",\"title_case\":false},\"query\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"query\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Execute SQL 
query.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"SQL Executor\",\"documentation\":\"\",\"custom_fields\":{\"query\":null,\"database\":null,\"include_columns\":null,\"passthrough\":null,\"add_error\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ShouldRunNext\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"The language model to use for the decision.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"# Implement ShouldRunNext component\\nfrom langchain_core.prompts import PromptTemplate\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, Prompt\\n\\n\\nclass ShouldRunNext(CustomComponent):\\n display_name = \\\"Should Run Next\\\"\\n description = \\\"Decides whether to run the next component.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt\\\",\\n \\\"info\\\": \\\"The prompt to use for the decision. 
It should generate a boolean response (True or False).\\\",\\n },\\n \\\"llm\\\": {\\n \\\"display_name\\\": \\\"LLM\\\",\\n \\\"info\\\": \\\"The language model to use for the decision.\\\",\\n },\\n }\\n\\n def build(self, template: Prompt, llm: BaseLanguageModel, **kwargs) -> dict:\\n # This is a simple component that always returns True\\n prompt_template = PromptTemplate.from_template(template)\\n\\n attributes_to_check = [\\\"text\\\", \\\"page_content\\\"]\\n for key, value in kwargs.items():\\n for attribute in attributes_to_check:\\n if hasattr(value, attribute):\\n kwargs[key] = getattr(value, attribute)\\n\\n chain = prompt_template | llm\\n result = chain.invoke(kwargs)\\n if hasattr(result, \\\"content\\\") and isinstance(result.content, str):\\n result = result.content\\n elif isinstance(result, str):\\n result = result\\n else:\\n result = result.get(\\\"response\\\")\\n\\n if result.lower() not in [\\\"true\\\", \\\"false\\\"]:\\n raise ValueError(\\\"The prompt should generate a boolean response (True or False).\\\")\\n # The string should be the words true or false\\n # if not raise an error\\n bool_result = result.lower() == \\\"true\\\"\\n return {\\\"condition\\\": bool_result, \\\"result\\\": kwargs}\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"template\":{\"type\":\"prompt\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Decides whether to run the next component.\",\"base_classes\":[\"object\",\"dict\"],\"display_name\":\"Should Run 
Next\",\"documentation\":\"\",\"custom_fields\":{\"template\":null,\"llm\":null},\"output_types\":[\"dict\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"PythonFunction\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Code\\nfrom langflow.interface.custom.utils import get_function\\n\\n\\nclass PythonFunctionComponent(CustomComponent):\\n display_name = \\\"Python Function\\\"\\n description = \\\"Define a Python function.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"function_code\\\": {\\n \\\"display_name\\\": \\\"Code\\\",\\n \\\"info\\\": \\\"The code for the function.\\\",\\n \\\"show\\\": True,\\n },\\n }\\n\\n def build(self, function_code: Code) -> Callable:\\n self.status = function_code\\n func = get_function(function_code)\\n return func\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"function_code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"function_code\",\"display_name\":\"Code\",\"advanced\":false,\"dynamic\":false,\"info\":\"The code for the function.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Define a Python function.\",\"base_classes\":[\"Callable\"],\"display_name\":\"Python 
Function\",\"documentation\":\"\",\"custom_fields\":{\"function_code\":null},\"output_types\":[\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"PostRequest\":{\"template\":{\"document\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nimport requests\\nfrom langchain_core.documents import Document\\nfrom langflow import CustomComponent\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass PostRequest(CustomComponent):\\n display_name: str = \\\"POST Request\\\"\\n description: str = \\\"Make a POST request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#post-request\\\"\\n beta: bool = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def post_document(\\n self,\\n session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> Document:\\n try:\\n response = session.post(url, headers=headers, data=document.page_content)\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n 
\\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": 500,\\n },\\n )\\n\\n def build(\\n self,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [self.post_document(session, doc, url, headers) for doc in documents]\\n self.repr_value = documents\\n return documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"headers\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the request.\",\"title_case\":false},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Make a POST request to the given URL.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"POST 
Request\",\"documentation\":\"https://docs.langflow.org/components/utilities#post-request\",\"custom_fields\":{\"document\":null,\"url\":null,\"headers\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"IDGenerator\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"import uuid\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass UUIDGeneratorComponent(CustomComponent):\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n display_name = \\\"Unique ID Generator\\\"\\n description = \\\"Generates a unique ID.\\\"\\n\\n def generate(self, *args, **kwargs):\\n return str(uuid.uuid4().hex)\\n\\n def build_config(self):\\n return {\\\"unique_id\\\": {\\\"display_name\\\": \\\"Value\\\", \\\"value\\\": self.generate}}\\n\\n def build(self, unique_id: str) -> str:\\n return unique_id\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"unique_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"a62d43140aba4c799af4ddc400295790\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"unique_id\",\"display_name\":\"Value\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"refresh\":true,\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Generates a unique ID.\",\"base_classes\":[\"object\",\"str\"],\"display_name\":\"Unique ID Generator\",\"documentation\":\"http://docs.langflow.org/components/custom\",\"custom_fields\":{\"unique_id\":null},\"output_types\":[\"str\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SQLDatabase\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from 
langchain_experimental.sql.base import SQLDatabase\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass SQLDatabaseComponent(CustomComponent):\\n display_name = \\\"SQLDatabase\\\"\\n description = \\\"SQL Database\\\"\\n\\n def build_config(self):\\n return {\\n \\\"uri\\\": {\\\"display_name\\\": \\\"URI\\\", \\\"info\\\": \\\"URI to the database.\\\"},\\n }\\n\\n def clean_up_uri(self, uri: str) -> str:\\n if uri.startswith(\\\"postgresql://\\\"):\\n uri = uri.replace(\\\"postgresql://\\\", \\\"postgres://\\\")\\n return uri.strip()\\n\\n def build(self, uri: str) -> SQLDatabase:\\n uri = self.clean_up_uri(uri)\\n return SQLDatabase.from_uri(uri)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"uri\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"uri\",\"display_name\":\"URI\",\"advanced\":false,\"dynamic\":false,\"info\":\"URI to the database.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"SQL Database\",\"base_classes\":[\"object\",\"SQLDatabase\"],\"display_name\":\"SQLDatabase\",\"documentation\":\"\",\"custom_fields\":{\"uri\":null},\"output_types\":[\"SQLDatabase\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"RecordsAsText\":{\"template\":{\"records\":{\"type\":\"Record\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"records\",\"display_name\":\"Records\",\"advanced\":false,\"dynamic\":false,\"info\":\"The records to convert to text.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import 
Text\\nfrom langflow.schema import Record\\n\\n\\nclass RecordsAsTextComponent(CustomComponent):\\n display_name = \\\"Records to Text\\\"\\n description = \\\"Converts Records a list of Records to text using a template.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"records\\\": {\\n \\\"display_name\\\": \\\"Records\\\",\\n \\\"info\\\": \\\"The records to convert to text.\\\",\\n },\\n \\\"template\\\": {\\n \\\"display_name\\\": \\\"Template\\\",\\n \\\"info\\\": \\\"The template to use for formatting the records. It must contain the keys {text} and {data}.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n records: list[Record],\\n template: str = \\\"Text: {text}\\\\nData: {data}\\\",\\n ) -> Text:\\n if isinstance(records, Record):\\n records = [records]\\n\\n formated_records = [\\n template.format(text=record.text, data=record.data, **record.data)\\n for record in records\\n ]\\n result_string = \\\"\\\\n\\\".join(formated_records)\\n self.status = result_string\\n return result_string\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"template\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"Text: {text}\\\\nData: {data}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"template\",\"display_name\":\"Template\",\"advanced\":false,\"dynamic\":false,\"info\":\"The template to use for formatting the records. 
It must contain the keys {text} and {data}.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Converts Records a list of Records to text using a template.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Records to Text\",\"documentation\":\"\",\"custom_fields\":{\"records\":null,\"template\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"UpdateRequest\":{\"template\":{\"document\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nimport requests\\nfrom langchain_core.documents import Document\\nfrom langflow import CustomComponent\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass UpdateRequest(CustomComponent):\\n display_name: str = \\\"Update Request\\\"\\n description: str = \\\"Make a PATCH request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#update-request\\\"\\n beta: bool = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"field_type\\\": \\\"NestedDict\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n \\\"method\\\": {\\n \\\"display_name\\\": \\\"Method\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"The HTTP method to use.\\\",\\n 
\\\"options\\\": [\\\"PATCH\\\", \\\"PUT\\\"],\\n \\\"value\\\": \\\"PATCH\\\",\\n },\\n }\\n\\n def update_document(\\n self,\\n session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n method: str = \\\"PATCH\\\",\\n ) -> Document:\\n try:\\n if method == \\\"PATCH\\\":\\n response = session.patch(url, headers=headers, data=document.page_content)\\n elif method == \\\"PUT\\\":\\n response = session.put(url, headers=headers, data=document.page_content)\\n else:\\n raise ValueError(f\\\"Unsupported method: {method}\\\")\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n method: str,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> List[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(isinstance(doc, Document) for doc in document):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [self.update_document(session, doc, url, headers, method) for doc in documents]\\n self.repr_value = documents\\n return 
documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"headers\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the request.\",\"title_case\":false},\"method\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"PATCH\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"PATCH\",\"PUT\"],\"name\":\"method\",\"display_name\":\"Method\",\"advanced\":false,\"dynamic\":false,\"info\":\"The HTTP method to use.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Make a PATCH request to the given URL.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"Update 
Request\",\"documentation\":\"https://docs.langflow.org/components/utilities#update-request\",\"custom_fields\":{\"method\":null,\"document\":null,\"url\":null,\"headers\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"JSONDocumentBuilder\":{\"template\":{\"document\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"### JSON Document Builder\\n\\n# Build a Document containing a JSON object using a key and another Document page content.\\n\\n# **Params**\\n\\n# - **Key:** The key to use for the JSON object.\\n# - **Document:** The Document page to use for the JSON object.\\n\\n# **Output**\\n\\n# - **Document:** The Document containing the JSON object.\\n\\nfrom langchain_core.documents import Document\\nfrom langflow import CustomComponent\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass JSONDocumentBuilder(CustomComponent):\\n display_name: str = \\\"JSON Document Builder\\\"\\n description: str = \\\"Build a Document containing a JSON object using a key and another Document page content.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n beta = True\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#json-document-builder\\\"\\n\\n field_config = {\\n \\\"key\\\": {\\\"display_name\\\": \\\"Key\\\"},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def build(\\n self,\\n key: str,\\n document: Document,\\n ) -> Document:\\n documents = None\\n if isinstance(document, list):\\n documents = [\\n Document(page_content=orjson_dumps({key: doc.page_content}, indent_2=False)) 
for doc in document\\n ]\\n elif isinstance(document, Document):\\n documents = Document(page_content=orjson_dumps({key: document.page_content}, indent_2=False))\\n else:\\n raise TypeError(f\\\"Expected Document or list of Documents, got {type(document)}\\\")\\n self.repr_value = documents\\n return documents\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"key\",\"display_name\":\"Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Build a Document containing a JSON object using a key and another Document page content.\",\"base_classes\":[\"Serializable\",\"Document\"],\"display_name\":\"JSON Document Builder\",\"documentation\":\"https://docs.langflow.org/components/utilities#json-document-builder\",\"custom_fields\":{\"key\":null,\"document\":null},\"output_types\":[\"Document\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"output_parsers\":{\"ResponseSchema\":{\"template\":{\"description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"type\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"string\",\"fileTypes\":[],\"pas
sword\":false,\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"ResponseSchema\"},\"description\":\"A schema for a response from a structured output parser.\",\"base_classes\":[\"ResponseSchema\"],\"display_name\":\"ResponseSchema\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"StructuredOutputParser\":{\"template\":{\"response_schemas\":{\"type\":\"ResponseSchema\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"password\":false,\"name\":\"response_schemas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"StructuredOutputParser\"},\"description\":\"\",\"base_classes\":[\"BaseOutputParser\",\"Runnable\",\"BaseLLMOutputParser\",\"Generic\",\"RunnableSerializable\",\"StructuredOutputParser\",\"Serializable\",\"object\"],\"display_name\":\"StructuredOutputParser\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"custom_fields\":{},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":false}},\"retrievers\":{\"AmazonKendra\":{\"template\":{\"attribute_filter\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"attribute_filter\",\"display_name\":\"Attribute Filter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.schema import BaseRetriever\\nfrom langchain_community.retrievers import AmazonKendraRetriever\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass 
AmazonKendraRetrieverComponent(CustomComponent):\\n display_name: str = \\\"Amazon Kendra Retriever\\\"\\n description: str = \\\"Retriever that uses the Amazon Kendra API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"index_id\\\": {\\\"display_name\\\": \\\"Index ID\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"Region Name\\\"},\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"attribute_filter\\\": {\\n \\\"display_name\\\": \\\"Attribute Filter\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"top_k\\\": {\\\"display_name\\\": \\\"Top K\\\", \\\"field_type\\\": \\\"int\\\"},\\n \\\"user_context\\\": {\\n \\\"display_name\\\": \\\"User Context\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n index_id: str,\\n top_k: int = 3,\\n region_name: Optional[str] = None,\\n credentials_profile_name: Optional[str] = None,\\n attribute_filter: Optional[dict] = None,\\n user_context: Optional[dict] = None,\\n ) -> BaseRetriever:\\n try:\\n output = AmazonKendraRetriever(\\n index_id=index_id,\\n top_k=top_k,\\n region_name=region_name,\\n credentials_profile_name=credentials_profile_name,\\n attribute_filter=attribute_filter,\\n user_context=user_context,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonKendra API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"index_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_id\",\"display_name\":\"Index ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"region_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"region_name\",\"display_name\":\"Region Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"top_k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":3,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"user_context\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"user_context\",\"display_name\":\"User Context\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Retriever that uses the Amazon Kendra API.\",\"base_classes\":[\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Amazon Kendra 
Retriever\",\"documentation\":\"\",\"custom_fields\":{\"index_id\":null,\"top_k\":null,\"region_name\":null,\"credentials_profile_name\":null,\"attribute_filter\":null,\"user_context\":null},\"output_types\":[\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VectaraSelfQueryRetriver\":{\"template\":{\"llm\":{\"type\":\"BaseLanguageModel\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"For self query retriever\",\"title_case\":false},\"vectorstore\":{\"type\":\"VectorStore\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectorstore\",\"display_name\":\"Vector Store\",\"advanced\":false,\"dynamic\":false,\"info\":\"Input Vectara Vectore Store\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List\\nfrom langflow import CustomComponent\\nimport json\\nfrom langchain.schema import BaseRetriever\\nfrom langchain.schema.vectorstore import VectorStore\\nfrom langchain.base_language import BaseLanguageModel\\nfrom langchain.retrievers.self_query.base import SelfQueryRetriever\\nfrom langchain.chains.query_constructor.base import AttributeInfo\\n\\n\\nclass VectaraSelfQueryRetriverComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing Vectara Self Query Retriever using a vector store.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Vectara Self Query Retriever for Vectara Vector Store\\\"\\n description: str = \\\"Implementation of Vectara Self Query Retriever\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/retrievers/self_query/vectara_self_query\\\"\\n beta = True\\n\\n field_config = 
{\\n \\\"code\\\": {\\\"show\\\": True},\\n \\\"vectorstore\\\": {\\\"display_name\\\": \\\"Vector Store\\\", \\\"info\\\": \\\"Input Vectara Vectore Store\\\"},\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\", \\\"info\\\": \\\"For self query retriever\\\"},\\n \\\"document_content_description\\\": {\\n \\\"display_name\\\": \\\"Document Content Description\\\",\\n \\\"info\\\": \\\"For self query retriever\\\",\\n },\\n \\\"metadata_field_info\\\": {\\n \\\"display_name\\\": \\\"Metadata Field Info\\\",\\n \\\"info\\\": 'Each metadata field info is a string in the form of key value pair dictionary containing additional search metadata.\\\\nExample input: {\\\"name\\\":\\\"speech\\\",\\\"description\\\":\\\"what name of the speech\\\",\\\"type\\\":\\\"string or list[string]\\\"}.\\\\nThe keys should remain constant(name, description, type)',\\n },\\n }\\n\\n def build(\\n self,\\n vectorstore: VectorStore,\\n document_content_description: str,\\n llm: BaseLanguageModel,\\n metadata_field_info: List[str],\\n ) -> BaseRetriever:\\n metadata_field_obj = []\\n\\n for meta in metadata_field_info:\\n meta_obj = json.loads(meta)\\n if \\\"name\\\" not in meta_obj or \\\"description\\\" not in meta_obj or \\\"type\\\" not in meta_obj:\\n raise Exception(\\\"Incorrect metadata field info format.\\\")\\n attribute_info = AttributeInfo(\\n name=meta_obj[\\\"name\\\"],\\n description=meta_obj[\\\"description\\\"],\\n type=meta_obj[\\\"type\\\"],\\n )\\n metadata_field_obj.append(attribute_info)\\n\\n return SelfQueryRetriever.from_llm(\\n llm, vectorstore, document_content_description, metadata_field_obj, verbose=True\\n 
)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"document_content_description\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"document_content_description\",\"display_name\":\"Document Content Description\",\"advanced\":false,\"dynamic\":false,\"info\":\"For self query retriever\",\"title_case\":false,\"input_types\":[\"Text\"]},\"metadata_field_info\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata_field_info\",\"display_name\":\"Metadata Field Info\",\"advanced\":false,\"dynamic\":false,\"info\":\"Each metadata field info is a string in the form of key value pair dictionary containing additional search metadata.\\nExample input: {\\\"name\\\":\\\"speech\\\",\\\"description\\\":\\\"what name of the speech\\\",\\\"type\\\":\\\"string or list[string]\\\"}.\\nThe keys should remain constant(name, description, type)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vectara Self Query Retriever\",\"base_classes\":[\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Vectara Self Query Retriever for Vectara Vector 
Store\",\"documentation\":\"https://python.langchain.com/docs/integrations/retrievers/self_query/vectara_self_query\",\"custom_fields\":{\"vectorstore\":null,\"document_content_description\":null,\"llm\":null,\"metadata_field_info\":null},\"output_types\":[\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"MultiQueryRetriever\":{\"template\":{\"llm\":{\"type\":\"BaseLLM\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prompt\":{\"type\":\"PromptTemplate\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"retriever\":{\"type\":\"BaseRetriever\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"retriever\",\"display_name\":\"Retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Callable, Optional, Union\\n\\nfrom langchain.retrievers import MultiQueryRetriever\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLLM, BaseRetriever, PromptTemplate\\n\\n\\nclass MultiQueryRetrieverComponent(CustomComponent):\\n display_name = \\\"MultiQueryRetriever\\\"\\n description = \\\"Initialize from llm using default template.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever\\\"\\n\\n def build_config(self):\\n return {\\n \\\"llm\\\": {\\\"display_name\\\": 
\\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt\\\",\\n \\\"default\\\": {\\n \\\"input_variables\\\": [\\\"question\\\"],\\n \\\"input_types\\\": {},\\n \\\"output_parser\\\": None,\\n \\\"partial_variables\\\": {},\\n \\\"template\\\": \\\"You are an AI language model assistant. Your task is \\\\n\\\"\\n \\\"to generate 3 different versions of the given user \\\\n\\\"\\n \\\"question to retrieve relevant documents from a vector database. \\\\n\\\"\\n \\\"By generating multiple perspectives on the user question, \\\\n\\\"\\n \\\"your goal is to help the user overcome some of the limitations \\\\n\\\"\\n \\\"of distance-based similarity search. Provide these alternative \\\\n\\\"\\n \\\"questions separated by newlines. Original question: {question}\\\",\\n \\\"template_format\\\": \\\"f-string\\\",\\n \\\"validate_template\\\": False,\\n \\\"_type\\\": \\\"prompt\\\",\\n },\\n },\\n \\\"retriever\\\": {\\\"display_name\\\": \\\"Retriever\\\"},\\n \\\"parser_key\\\": {\\\"display_name\\\": \\\"Parser Key\\\", \\\"default\\\": \\\"lines\\\"},\\n }\\n\\n def build(\\n self,\\n llm: BaseLLM,\\n retriever: BaseRetriever,\\n prompt: Optional[PromptTemplate] = None,\\n parser_key: str = \\\"lines\\\",\\n ) -> Union[Callable, MultiQueryRetriever]:\\n if not prompt:\\n return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, parser_key=parser_key)\\n else:\\n return MultiQueryRetriever.from_llm(llm=llm, retriever=retriever, prompt=prompt, parser_key=parser_key)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"parser_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"lines\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"parser_key\",\"display_name\":\"Parser 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Initialize from llm using default template.\",\"base_classes\":[],\"display_name\":\"MultiQueryRetriever\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever\",\"custom_fields\":{\"llm\":null,\"retriever\":null,\"prompt\":null,\"parser_key\":null},\"output_types\":[],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"MetalRetriever\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"client_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"client_id\",\"display_name\":\"Client ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.schema import BaseRetriever\\nfrom langchain_community.retrievers import MetalRetriever\\nfrom metal_sdk.metal import Metal # type: ignore\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass MetalRetrieverComponent(CustomComponent):\\n display_name: str = \\\"Metal Retriever\\\"\\n description: str = \\\"Retriever that uses the Metal API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"password\\\": True},\\n \\\"client_id\\\": {\\\"display_name\\\": \\\"Client ID\\\", \\\"password\\\": True},\\n \\\"index_id\\\": 
{\\\"display_name\\\": \\\"Index ID\\\"},\\n \\\"params\\\": {\\\"display_name\\\": \\\"Parameters\\\"},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(self, api_key: str, client_id: str, index_id: str, params: Optional[dict] = None) -> BaseRetriever:\\n try:\\n metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id)\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Metal API.\\\") from e\\n return MetalRetriever(client=metal, params=params or {})\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"index_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_id\",\"display_name\":\"Index ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"params\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"params\",\"display_name\":\"Parameters\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Retriever that uses the Metal API.\",\"base_classes\":[\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Metal 
Retriever\",\"documentation\":\"\",\"custom_fields\":{\"api_key\":null,\"client_id\":null,\"index_id\":null,\"params\":null},\"output_types\":[\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"custom_components\":{\"CustomComponent\":{\"template\":{\"param\":{\"type\":\"Data\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"param\",\"display_name\":\"Parameter\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langflow.field_typing import Data\\n\\n\\nclass Component(CustomComponent):\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\\"param\\\": {\\\"display_name\\\": \\\"Parameter\\\"}}\\n\\n def build(self, param: Data) -> Data:\\n return 
param\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"base_classes\":[\"object\",\"Data\"],\"display_name\":\"CustomComponent\",\"documentation\":\"http://docs.langflow.org/components/custom\",\"custom_fields\":{\"param\":null},\"output_types\":[\"Data\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"vectorstores\":{\"Weaviate\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"attributes\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"attributes\",\"display_name\":\"Attributes\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nimport weaviate # type: ignore\\nfrom langchain.embeddings.base 
import Embeddings\\nfrom langchain.schema import BaseRetriever, Document\\nfrom langchain_community.vectorstores import VectorStore, Weaviate\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass WeaviateVectorStore(CustomComponent):\\n display_name: str = \\\"Weaviate\\\"\\n description: str = \\\"Implementation of Vector Store using Weaviate\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/weaviate\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"Weaviate URL\\\", \\\"value\\\": \\\"http://localhost:8080\\\"},\\n \\\"api_key\\\": {\\n \\\"display_name\\\": \\\"API Key\\\",\\n \\\"password\\\": True,\\n \\\"required\\\": False,\\n },\\n \\\"index_name\\\": {\\n \\\"display_name\\\": \\\"Index name\\\",\\n \\\"required\\\": False,\\n },\\n \\\"text_key\\\": {\\\"display_name\\\": \\\"Text Key\\\", \\\"required\\\": False, \\\"advanced\\\": True, \\\"value\\\": \\\"text\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"attributes\\\": {\\n \\\"display_name\\\": \\\"Attributes\\\",\\n \\\"required\\\": False,\\n \\\"is_list\\\": True,\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"search_by_text\\\": {\\\"display_name\\\": \\\"Search By Text\\\", \\\"field_type\\\": \\\"bool\\\", \\\"advanced\\\": True},\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n url: str,\\n search_by_text: bool = False,\\n api_key: Optional[str] = None,\\n index_name: Optional[str] = None,\\n text_key: str = \\\"text\\\",\\n embedding: Optional[Embeddings] = None,\\n documents: Optional[Document] = None,\\n attributes: Optional[list] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n if api_key:\\n auth_config = weaviate.AuthApiKey(api_key=api_key)\\n client = weaviate.Client(url=url, auth_client_secret=auth_config)\\n else:\\n client = 
weaviate.Client(url=url)\\n\\n def _to_pascal_case(word: str):\\n if word and not word[0].isupper():\\n word = word.capitalize()\\n\\n if word.isidentifier():\\n return word\\n\\n word = word.replace(\\\"-\\\", \\\" \\\").replace(\\\"_\\\", \\\" \\\")\\n parts = word.split()\\n pascal_case_word = \\\"\\\".join([part.capitalize() for part in parts])\\n\\n return pascal_case_word\\n\\n index_name = _to_pascal_case(index_name) if index_name else None\\n\\n if documents is not None and embedding is not None:\\n return Weaviate.from_documents(\\n client=client,\\n index_name=index_name,\\n documents=documents,\\n embedding=embedding,\\n by_text=search_by_text,\\n )\\n\\n return Weaviate(\\n client=client,\\n index_name=index_name,\\n text_key=text_key,\\n embedding=embedding,\\n by_text=search_by_text,\\n attributes=attributes if attributes is not None else [],\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"index_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"search_by_text\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_by_text\",\"display_name\":\"Search By Text\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"text_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"text_key\",\"display_name\":\"Text 
Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"http://localhost:8080\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"Weaviate URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using Weaviate\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Weaviate\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/weaviate\",\"custom_fields\":{\"url\":null,\"search_by_text\":null,\"api_key\":null,\"index_name\":null,\"text_key\":null,\"embedding\":null,\"documents\":null,\"attributes\":null},\"output_types\":[\"VectorStore\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"Vectara\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"If provided, will be upserted to corpus (optional)\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"import tempfile\\nimport urllib\\nimport urllib.request\\nfrom typing import List, Optional, Union\\n\\nfrom langchain_community.embeddings import FakeEmbeddings\\nfrom langchain_community.vectorstores.vectara import Vectara\\nfrom langchain_core.vectorstores import VectorStore\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseRetriever, Document\\n\\n\\nclass 
VectaraComponent(CustomComponent):\\n display_name: str = \\\"Vectara\\\"\\n description: str = \\\"Implementation of Vector Store using Vectara\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/vectara\\\"\\n beta = True\\n field_config = {\\n \\\"vectara_customer_id\\\": {\\n \\\"display_name\\\": \\\"Vectara Customer ID\\\",\\n },\\n \\\"vectara_corpus_id\\\": {\\n \\\"display_name\\\": \\\"Vectara Corpus ID\\\",\\n },\\n \\\"vectara_api_key\\\": {\\n \\\"display_name\\\": \\\"Vectara API Key\\\",\\n \\\"password\\\": True,\\n },\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"info\\\": \\\"If provided, will be upserted to corpus (optional)\\\"},\\n \\\"files_url\\\": {\\n \\\"display_name\\\": \\\"Files Url\\\",\\n \\\"info\\\": \\\"Make vectara object using url of files (optional)\\\",\\n },\\n }\\n\\n def build(\\n self,\\n vectara_customer_id: str,\\n vectara_corpus_id: str,\\n vectara_api_key: str,\\n files_url: Optional[List[str]] = None,\\n documents: Optional[Document] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n source = \\\"Langflow\\\"\\n\\n if documents is not None:\\n return Vectara.from_documents(\\n documents=documents, # type: ignore\\n embedding=FakeEmbeddings(size=768),\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n source=source,\\n )\\n\\n if files_url is not None:\\n files_list = []\\n for url in files_url:\\n name = tempfile.NamedTemporaryFile().name\\n urllib.request.urlretrieve(url, name)\\n files_list.append(name)\\n\\n return Vectara.from_files(\\n files=files_list,\\n embedding=FakeEmbeddings(size=768),\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n source=source,\\n )\\n\\n return Vectara(\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n 
source=source,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"files_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"files_url\",\"display_name\":\"Files Url\",\"advanced\":false,\"dynamic\":false,\"info\":\"Make vectara object using url of files (optional)\",\"title_case\":false,\"input_types\":[\"Text\"]},\"vectara_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"vectara_api_key\",\"display_name\":\"Vectara API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"vectara_corpus_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectara_corpus_id\",\"display_name\":\"Vectara Corpus ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"vectara_customer_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vectara_customer_id\",\"display_name\":\"Vectara Customer ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using 
Vectara\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Vectara\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/vectara\",\"custom_fields\":{\"vectara_customer_id\":null,\"vectara_corpus_id\":null,\"vectara_api_key\":null,\"files_url\":null,\"documents\":null},\"output_types\":[\"VectorStore\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"Chroma\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chroma_server_cors_allow_origins\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Server CORS Allow Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"chroma_server_grpc_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Server gRPC 
Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chroma_server_host\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"chroma_server_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_port\",\"display_name\":\"Server Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chroma_server_ssl_enabled\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional, Union\\n\\nimport chromadb # type: ignore\\nfrom langchain.embeddings.base import Embeddings\\nfrom langchain.schema import BaseRetriever, Document\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.chroma import Chroma\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass ChromaComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Chroma.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Chroma\\\"\\n description: str = \\\"Implementation of Vector Store using Chroma\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/chroma\\\"\\n beta: bool = True\\n icon = \\\"Chroma\\\"\\n\\n def 
build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\", \\\"value\\\": \\\"langflow\\\"},\\n \\\"index_directory\\\": {\\\"display_name\\\": \\\"Persist Directory\\\"},\\n \\\"code\\\": {\\\"advanced\\\": True, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"chroma_server_cors_allow_origins\\\": {\\n \\\"display_name\\\": \\\"Server CORS Allow Origins\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_host\\\": {\\\"display_name\\\": \\\"Server Host\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_port\\\": {\\\"display_name\\\": \\\"Server Port\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_grpc_port\\\": {\\n \\\"display_name\\\": \\\"Server gRPC Port\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_ssl_enabled\\\": {\\n \\\"display_name\\\": \\\"Server SSL Enabled\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n collection_name: str,\\n embedding: Embeddings,\\n chroma_server_ssl_enabled: bool,\\n index_directory: Optional[str] = None,\\n documents: Optional[List[Document]] = None,\\n chroma_server_cors_allow_origins: Optional[str] = None,\\n chroma_server_host: Optional[str] = None,\\n chroma_server_port: Optional[int] = None,\\n chroma_server_grpc_port: Optional[int] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - collection_name (str): The name of the collection.\\n - index_directory (Optional[str]): The directory to persist the Vector Store to.\\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\\n - embedding 
(Optional[Embeddings]): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\\n - chroma_server_host (Optional[str]): The host for the Chroma server.\\n - chroma_server_port (Optional[int]): The port for the Chroma server.\\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\\n\\n Returns:\\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\\n \\\"\\\"\\\"\\n\\n # Chroma settings\\n chroma_settings = None\\n\\n if chroma_server_host is not None:\\n chroma_settings = chromadb.config.Settings(\\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins\\n or None,\\n chroma_server_host=chroma_server_host,\\n chroma_server_port=chroma_server_port or None,\\n chroma_server_grpc_port=chroma_server_grpc_port or None,\\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\\n )\\n\\n # If documents, then we need to create a Chroma instance using .from_documents\\n\\n # Check index_directory and expand it if it is a relative path\\n\\n index_directory = self.resolve_path(index_directory)\\n\\n if documents is not None and embedding is not None:\\n if len(documents) == 0:\\n raise ValueError(\\n \\\"If documents are provided, there must be at least one document.\\\"\\n )\\n chroma = Chroma.from_documents(\\n documents=documents, # type: ignore\\n persist_directory=index_directory,\\n collection_name=collection_name,\\n embedding=embedding,\\n client_settings=chroma_settings,\\n )\\n else:\\n chroma = Chroma(\\n persist_directory=index_directory,\\n client_settings=chroma_settings,\\n embedding_function=embedding,\\n )\\n return 
chroma\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"langflow\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"index_directory\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_directory\",\"display_name\":\"Persist Directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using Chroma\",\"icon\":\"Chroma\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Chroma\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/chroma\",\"custom_fields\":{\"collection_name\":null,\"embedding\":null,\"chroma_server_ssl_enabled\":null,\"index_directory\":null,\"documents\":null,\"chroma_server_cors_allow_origins\":null,\"chroma_server_host\":null,\"chroma_server_port\":null,\"chroma_server_grpc_port\":null},\"output_types\":[\"VectorStore\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"SupabaseVectorStore\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddi
ngs\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Union\\n\\nfrom langchain.schema import BaseRetriever\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.supabase import SupabaseVectorStore\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Document, Embeddings, NestedDict\\nfrom supabase.client import Client, create_client\\n\\n\\nclass SupabaseComponent(CustomComponent):\\n display_name = \\\"Supabase\\\"\\n description = \\\"Return VectorStore initialized from texts and embeddings.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"query_name\\\": {\\\"display_name\\\": \\\"Query Name\\\"},\\n \\\"search_kwargs\\\": {\\\"display_name\\\": \\\"Search Kwargs\\\", \\\"advanced\\\": True},\\n \\\"supabase_service_key\\\": {\\\"display_name\\\": \\\"Supabase Service Key\\\"},\\n \\\"supabase_url\\\": {\\\"display_name\\\": \\\"Supabase URL\\\"},\\n \\\"table_name\\\": {\\\"display_name\\\": \\\"Table Name\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n documents: List[Document],\\n query_name: str = \\\"\\\",\\n search_kwargs: NestedDict = {},\\n supabase_service_key: str = \\\"\\\",\\n supabase_url: str = \\\"\\\",\\n table_name: str = \\\"\\\",\\n ) -> Union[VectorStore, SupabaseVectorStore, BaseRetriever]:\\n supabase: Client = create_client(supabase_url, supabase_key=supabase_service_key)\\n return SupabaseVectorStore.from_documents(\\n 
documents=documents,\\n embedding=embedding,\\n query_name=query_name,\\n search_kwargs=search_kwargs,\\n client=supabase,\\n table_name=table_name,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"query_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"query_name\",\"display_name\":\"Query Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"display_name\":\"Search Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"supabase_service_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"supabase_service_key\",\"display_name\":\"Supabase Service Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"supabase_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"supabase_url\",\"display_name\":\"Supabase URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"table_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"table_name\",\"display_name\":\"Table 
Name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Return VectorStore initialized from texts and embeddings.\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"SupabaseVectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Supabase\",\"documentation\":\"\",\"custom_fields\":{\"embedding\":null,\"documents\":null,\"query_name\":null,\"search_kwargs\":null,\"supabase_service_key\":null,\"supabase_url\":null,\"table_name\":null},\"output_types\":[\"VectorStore\",\"SupabaseVectorStore\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"Redis\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nfrom langchain.embeddings.base import Embeddings\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.redis import Redis\\nfrom langchain_core.documents import Document\\nfrom langchain_core.retrievers import BaseRetriever\\nfrom langflow import CustomComponent\\n\\n\\nclass RedisComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Redis.\\n \\\"\\\"\\\"\\n\\n 
display_name: str = \\\"Redis\\\"\\n description: str = \\\"Implementation of Vector Store using Redis\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/redis\\\"\\n beta = True\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"index_name\\\": {\\\"display_name\\\": \\\"Index Name\\\", \\\"value\\\": \\\"your_index\\\"},\\n \\\"code\\\": {\\\"show\\\": False, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"schema\\\": {\\\"display_name\\\": \\\"Schema\\\", \\\"file_types\\\": [\\\".yaml\\\"]},\\n \\\"redis_server_url\\\": {\\n \\\"display_name\\\": \\\"Redis Server Connection String\\\",\\n \\\"advanced\\\": False,\\n },\\n \\\"redis_index_name\\\": {\\\"display_name\\\": \\\"Redis Index\\\", \\\"advanced\\\": False},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n redis_server_url: str,\\n redis_index_name: str,\\n schema: Optional[str] = None,\\n documents: Optional[Document] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - embedding (Embeddings): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - redis_index_name (str): The name of the Redis index.\\n - redis_server_url (str): The URL for the Redis server.\\n\\n Returns:\\n - VectorStore: The Vector Store object.\\n \\\"\\\"\\\"\\n if documents is None:\\n if schema is None:\\n raise ValueError(\\\"If no documents are provided, a schema must be provided.\\\")\\n redis_vs = Redis.from_existing_index(\\n embedding=embedding,\\n index_name=redis_index_name,\\n schema=schema,\\n 
key_prefix=None,\\n redis_url=redis_server_url,\\n )\\n else:\\n redis_vs = Redis.from_documents(\\n documents=documents, # type: ignore\\n embedding=embedding,\\n redis_url=redis_server_url,\\n index_name=redis_index_name,\\n )\\n return redis_vs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"redis_index_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"redis_index_name\",\"display_name\":\"Redis Index\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"redis_server_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"redis_server_url\",\"display_name\":\"Redis Server Connection String\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"schema\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".yaml\"],\"file_path\":\"\",\"password\":false,\"name\":\"schema\",\"display_name\":\"Schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using 
Redis\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Redis\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/redis\",\"custom_fields\":{\"embedding\":null,\"redis_server_url\":null,\"redis_index_name\":null,\"schema\":null,\"documents\":null},\"output_types\":[\"VectorStore\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"pgvector\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nfrom langchain.embeddings.base import Embeddings\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.pgvector import PGVector\\nfrom langchain_core.documents import Document\\nfrom langchain_core.retrievers import BaseRetriever\\nfrom langflow import CustomComponent\\n\\n\\nclass PGVectorComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using PostgreSQL.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"PGVector\\\"\\n description: str = \\\"Implementation of Vector Store using PostgreSQL\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/pgvector\\\"\\n\\n def 
build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"pg_server_url\\\": {\\n \\\"display_name\\\": \\\"PostgreSQL Server Connection String\\\",\\n \\\"advanced\\\": False,\\n },\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Table\\\", \\\"advanced\\\": False},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n pg_server_url: str,\\n collection_name: str,\\n documents: Optional[Document] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - embedding (Embeddings): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - collection_name (str): The name of the PG table.\\n - pg_server_url (str): The URL for the PG server.\\n\\n Returns:\\n - VectorStore: The Vector Store object.\\n \\\"\\\"\\\"\\n\\n try:\\n if documents is None:\\n vector_store = PGVector.from_existing_index(\\n embedding=embedding,\\n collection_name=collection_name,\\n connection_string=pg_server_url,\\n )\\n else:\\n vector_store = PGVector.from_documents(\\n embedding=embedding,\\n documents=documents, # type: ignore\\n collection_name=collection_name,\\n connection_string=pg_server_url,\\n )\\n except Exception as e:\\n raise RuntimeError(f\\\"Failed to build PGVector: {e}\\\")\\n return 
vector_store\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Table\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"pg_server_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"pg_server_url\",\"display_name\":\"PostgreSQL Server Connection String\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Implementation of Vector Store using PostgreSQL\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"PGVector\",\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/pgvector\",\"custom_fields\":{\"embedding\":null,\"pg_server_url\":null,\"collection_name\":null,\"documents\":null},\"output_types\":[\"VectorStore\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"Pinecone\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\
"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"import os\\nfrom typing import List, Optional, Union\\n\\nimport pinecone # type: ignore\\nfrom langchain.schema import BaseRetriever\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.pinecone import Pinecone\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Document, Embeddings\\n\\n\\nclass PineconeComponent(CustomComponent):\\n display_name = \\\"Pinecone\\\"\\n description = \\\"Construct Pinecone wrapper from raw documents.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"index_name\\\": {\\\"display_name\\\": \\\"Index Name\\\"},\\n \\\"namespace\\\": {\\\"display_name\\\": \\\"Namespace\\\"},\\n \\\"pinecone_api_key\\\": {\\\"display_name\\\": \\\"Pinecone API Key\\\", \\\"default\\\": \\\"\\\", \\\"password\\\": True, \\\"required\\\": True},\\n \\\"pinecone_env\\\": {\\\"display_name\\\": \\\"Pinecone Environment\\\", \\\"default\\\": \\\"\\\", \\\"required\\\": True},\\n \\\"search_kwargs\\\": {\\\"display_name\\\": \\\"Search Kwargs\\\", \\\"default\\\": \\\"{}\\\"},\\n \\\"pool_threads\\\": {\\\"display_name\\\": \\\"Pool Threads\\\", \\\"default\\\": 1, \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n pinecone_env: str,\\n documents: List[Document],\\n text_key: str = \\\"text\\\",\\n pool_threads: int = 4,\\n index_name: Optional[str] = None,\\n pinecone_api_key: Optional[str] = None,\\n namespace: Optional[str] = \\\"default\\\",\\n ) -> Union[VectorStore, Pinecone, BaseRetriever]:\\n if pinecone_api_key is None or pinecone_env is None:\\n raise ValueError(\\\"Pinecone API Key and Environment are required.\\\")\\n 
if os.getenv(\\\"PINECONE_API_KEY\\\") is None and pinecone_api_key is None:\\n raise ValueError(\\\"Pinecone API Key is required.\\\")\\n\\n pinecone.init(api_key=pinecone_api_key, environment=pinecone_env) # type: ignore\\n if not index_name:\\n raise ValueError(\\\"Index Name is required.\\\")\\n if documents:\\n return Pinecone.from_documents(\\n documents=documents,\\n embedding=embedding,\\n index_name=index_name,\\n pool_threads=pool_threads,\\n namespace=namespace,\\n text_key=text_key,\\n )\\n\\n return Pinecone.from_existing_index(\\n index_name=index_name,\\n embedding=embedding,\\n text_key=text_key,\\n namespace=namespace,\\n pool_threads=pool_threads,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"index_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"namespace\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"default\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"namespace\",\"display_name\":\"Namespace\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"pinecone_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"pinecone_api_key\",\"display_name\":\"Pinecone API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"pinecone_env\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"pinecone_env\",\"display_name\":\"Pinecone Environment\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"pool_threads\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":4,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"pool_threads\",\"display_name\":\"Pool Threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"text_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"text_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Construct Pinecone wrapper from raw 
documents.\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"Pinecone\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Pinecone\",\"documentation\":\"\",\"custom_fields\":{\"embedding\":null,\"pinecone_env\":null,\"documents\":null,\"text_key\":null,\"pool_threads\":null,\"index_name\":null,\"pinecone_api_key\":null,\"namespace\":null},\"output_types\":[\"VectorStore\",\"Pinecone\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"Qdrant\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nfrom langchain.schema import BaseRetriever\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.qdrant import Qdrant\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Document, Embeddings, NestedDict\\n\\n\\nclass QdrantComponent(CustomComponent):\\n display_name = 
\\\"Qdrant\\\"\\n description = \\\"Construct Qdrant wrapper from a list of texts.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"password\\\": True, \\\"advanced\\\": True},\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\"},\\n \\\"content_payload_key\\\": {\\\"display_name\\\": \\\"Content Payload Key\\\", \\\"advanced\\\": True},\\n \\\"distance_func\\\": {\\\"display_name\\\": \\\"Distance Function\\\", \\\"advanced\\\": True},\\n \\\"grpc_port\\\": {\\\"display_name\\\": \\\"gRPC Port\\\", \\\"advanced\\\": True},\\n \\\"host\\\": {\\\"display_name\\\": \\\"Host\\\", \\\"advanced\\\": True},\\n \\\"https\\\": {\\\"display_name\\\": \\\"HTTPS\\\", \\\"advanced\\\": True},\\n \\\"location\\\": {\\\"display_name\\\": \\\"Location\\\", \\\"advanced\\\": True},\\n \\\"metadata_payload_key\\\": {\\\"display_name\\\": \\\"Metadata Payload Key\\\", \\\"advanced\\\": True},\\n \\\"path\\\": {\\\"display_name\\\": \\\"Path\\\", \\\"advanced\\\": True},\\n \\\"port\\\": {\\\"display_name\\\": \\\"Port\\\", \\\"advanced\\\": True},\\n \\\"prefer_grpc\\\": {\\\"display_name\\\": \\\"Prefer gRPC\\\", \\\"advanced\\\": True},\\n \\\"prefix\\\": {\\\"display_name\\\": \\\"Prefix\\\", \\\"advanced\\\": True},\\n \\\"search_kwargs\\\": {\\\"display_name\\\": \\\"Search Kwargs\\\", \\\"advanced\\\": True},\\n \\\"timeout\\\": {\\\"display_name\\\": \\\"Timeout\\\", \\\"advanced\\\": True},\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n collection_name: str,\\n documents: Optional[Document] = None,\\n api_key: Optional[str] = None,\\n content_payload_key: str = \\\"page_content\\\",\\n distance_func: str = \\\"Cosine\\\",\\n grpc_port: int = 6334,\\n https: bool = False,\\n 
host: Optional[str] = None,\\n location: Optional[str] = None,\\n metadata_payload_key: str = \\\"metadata\\\",\\n path: Optional[str] = None,\\n port: Optional[int] = 6333,\\n prefer_grpc: bool = False,\\n prefix: Optional[str] = None,\\n search_kwargs: Optional[NestedDict] = None,\\n timeout: Optional[int] = None,\\n url: Optional[str] = None,\\n ) -> Union[VectorStore, Qdrant, BaseRetriever]:\\n if documents is None:\\n from qdrant_client import QdrantClient\\n\\n client = QdrantClient(\\n location=location,\\n url=host,\\n port=port,\\n grpc_port=grpc_port,\\n https=https,\\n prefix=prefix,\\n timeout=timeout,\\n prefer_grpc=prefer_grpc,\\n metadata_payload_key=metadata_payload_key,\\n content_payload_key=content_payload_key,\\n api_key=api_key,\\n collection_name=collection_name,\\n host=host,\\n path=path,\\n )\\n vs = Qdrant(\\n client=client,\\n collection_name=collection_name,\\n embeddings=embedding,\\n )\\n return vs\\n else:\\n vs = Qdrant.from_documents(\\n documents=documents, # type: ignore\\n embedding=embedding,\\n api_key=api_key,\\n collection_name=collection_name,\\n content_payload_key=content_payload_key,\\n distance_func=distance_func,\\n grpc_port=grpc_port,\\n host=host,\\n https=https,\\n location=location,\\n metadata_payload_key=metadata_payload_key,\\n path=path,\\n port=port,\\n prefer_grpc=prefer_grpc,\\n prefix=prefix,\\n search_kwargs=search_kwargs,\\n timeout=timeout,\\n url=url,\\n )\\n return vs\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"content_payload_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"page_content\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"content_payload_key\",\"display_name\":\"Content Payload Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"distance_func\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"Cosine\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"distance_func\",\"display_name\":\"Distance Function\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"grpc_port\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6334,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"grpc_port\",\"display_name\":\"gRPC 
Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"host\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"host\",\"display_name\":\"Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"https\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"https\",\"display_name\":\"HTTPS\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"location\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"location\",\"display_name\":\"Location\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"metadata_payload_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"metadata\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata_payload_key\",\"display_name\":\"Metadata Payload 
Key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6333,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"port\",\"display_name\":\"Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prefer_grpc\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prefer_grpc\",\"display_name\":\"Prefer gRPC\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"prefix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"prefix\",\"display_name\":\"Prefix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"display_name\":\"Search 
Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"timeout\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Construct Qdrant wrapper from a list of texts.\",\"base_classes\":[\"Runnable\",\"Generic\",\"VectorStore\",\"Qdrant\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"Qdrant\",\"documentation\":\"\",\"custom_fields\":{\"embedding\":null,\"collection_name\":null,\"documents\":null,\"api_key\":null,\"content_payload_key\":null,\"distance_func\":null,\"grpc_port\":null,\"https\":null,\"host\":null,\"location\":null,\"metadata_payload_key\":null,\"path\":null,\"port\":null,\"prefer_grpc\":null,\"prefix\":null,\"search_kwargs\":null,\"timeout\":null,\"url\":null},\"output_types\":[\"VectorStore\",\"Qdrant\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"MongoDBAtlasVectorSearch\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"n
ame\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langchain_community.vectorstores import MongoDBAtlasVectorSearch\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import (\\n Document,\\n Embeddings,\\n NestedDict,\\n)\\n\\n\\nclass MongoDBAtlasComponent(CustomComponent):\\n display_name = \\\"MongoDB Atlas\\\"\\n description = \\\"Construct a `MongoDB Atlas Vector Search` vector store from raw documents.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\"},\\n \\\"db_name\\\": {\\\"display_name\\\": \\\"Database Name\\\"},\\n \\\"index_name\\\": {\\\"display_name\\\": \\\"Index Name\\\"},\\n \\\"mongodb_atlas_cluster_uri\\\": {\\\"display_name\\\": \\\"MongoDB Atlas Cluster URI\\\"},\\n \\\"search_kwargs\\\": {\\\"display_name\\\": \\\"Search Kwargs\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n documents: List[Document],\\n embedding: Embeddings,\\n collection_name: str = \\\"\\\",\\n db_name: str = \\\"\\\",\\n index_name: str = \\\"\\\",\\n mongodb_atlas_cluster_uri: str = \\\"\\\",\\n search_kwargs: Optional[NestedDict] = None,\\n ) -> MongoDBAtlasVectorSearch:\\n search_kwargs = search_kwargs or {}\\n return MongoDBAtlasVectorSearch(\\n documents=documents,\\n embedding=embedding,\\n collection_name=collection_name,\\n db_name=db_name,\\n index_name=index_name,\\n mongodb_atlas_cluster_uri=mongodb_atlas_cluster_uri,\\n search_kwargs=search_kwargs,\\n 
)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"db_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"db_name\",\"display_name\":\"Database Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"index_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"mongodb_atlas_cluster_uri\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mongodb_atlas_cluster_uri\",\"display_name\":\"MongoDB Atlas Cluster URI\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"search_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"search_kwargs\",\"display_name\":\"Search Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct a `MongoDB Atlas 
Vector Search` vector store from raw documents.\",\"base_classes\":[\"VectorStore\",\"MongoDBAtlasVectorSearch\"],\"display_name\":\"MongoDB Atlas\",\"documentation\":\"\",\"custom_fields\":{\"documents\":null,\"embedding\":null,\"collection_name\":null,\"db_name\":null,\"index_name\":null,\"mongodb_atlas_cluster_uri\":null,\"search_kwargs\":null},\"output_types\":[\"MongoDBAtlasVectorSearch\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ChromaSearch\":{\"template\":{\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"Embedding model to vectorize inputs (make sure to use same as index)\",\"title_case\":false},\"inputs\":{\"type\":\"Text\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chroma_server_cors_allow_origins\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Server CORS Allow Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"chroma_server_grpc_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Server gRPC 
Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chroma_server_host\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"chroma_server_port\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_port\",\"display_name\":\"Server Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"chroma_server_ssl_enabled\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nimport chromadb # type: ignore\\nfrom langchain_community.vectorstores.chroma import Chroma\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Embeddings, Text\\nfrom langflow.schema import Record, docs_to_records\\n\\n\\nclass ChromaSearchComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Chroma.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Chroma Search\\\"\\n description: str = \\\"Search a Chroma collection for similar documents.\\\"\\n beta: bool = True\\n icon = \\\"Chroma\\\"\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the 
configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n \\\"search_type\\\": {\\n \\\"display_name\\\": \\\"Search Type\\\",\\n \\\"options\\\": [\\\"Similarity\\\", \\\"MMR\\\"],\\n },\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\", \\\"value\\\": \\\"langflow\\\"},\\n # \\\"persist\\\": {\\\"display_name\\\": \\\"Persist\\\"},\\n \\\"index_directory\\\": {\\\"display_name\\\": \\\"Index Directory\\\"},\\n \\\"code\\\": {\\\"show\\\": False, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\n \\\"display_name\\\": \\\"Embedding\\\",\\n \\\"info\\\": \\\"Embedding model to vectorize inputs (make sure to use same as index)\\\",\\n },\\n \\\"chroma_server_cors_allow_origins\\\": {\\n \\\"display_name\\\": \\\"Server CORS Allow Origins\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_host\\\": {\\\"display_name\\\": \\\"Server Host\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_port\\\": {\\\"display_name\\\": \\\"Server Port\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_grpc_port\\\": {\\n \\\"display_name\\\": \\\"Server gRPC Port\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_ssl_enabled\\\": {\\n \\\"display_name\\\": \\\"Server SSL Enabled\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n inputs: Text,\\n search_type: str,\\n collection_name: str,\\n embedding: Embeddings,\\n chroma_server_ssl_enabled: bool,\\n index_directory: Optional[str] = None,\\n chroma_server_cors_allow_origins: Optional[str] = None,\\n chroma_server_host: Optional[str] = None,\\n chroma_server_port: Optional[int] = None,\\n chroma_server_grpc_port: Optional[int] = None,\\n ) -> List[Record]:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - collection_name (str): The name of the collection.\\n - 
persist_directory (Optional[str]): The directory to persist the Vector Store to.\\n - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server.\\n - persist (bool): Whether to persist the Vector Store or not.\\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\\n - chroma_server_host (Optional[str]): The host for the Chroma server.\\n - chroma_server_port (Optional[int]): The port for the Chroma server.\\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\\n\\n Returns:\\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\\n \\\"\\\"\\\"\\n\\n # Chroma settings\\n chroma_settings = None\\n\\n if chroma_server_host is not None:\\n chroma_settings = chromadb.config.Settings(\\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins or None,\\n chroma_server_host=chroma_server_host,\\n chroma_server_port=chroma_server_port or None,\\n chroma_server_grpc_port=chroma_server_grpc_port or None,\\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\\n )\\n index_directory = self.resolve_path(index_directory)\\n chroma = Chroma(\\n embedding_function=embedding,\\n collection_name=collection_name,\\n persist_directory=index_directory,\\n client_settings=chroma_settings,\\n )\\n\\n # Validate the inputs\\n docs = []\\n if inputs and isinstance(inputs, str):\\n docs = chroma.search(query=inputs, search_type=search_type.lower())\\n else:\\n raise ValueError(\\\"Invalid inputs provided.\\\")\\n return 
docs_to_records(docs)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"collection_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"langflow\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"index_directory\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"index_directory\",\"display_name\":\"Index Directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"search_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Similarity\",\"MMR\"],\"name\":\"search_type\",\"display_name\":\"Search Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Search a Chroma collection for similar documents.\",\"icon\":\"Chroma\",\"base_classes\":[\"Record\"],\"display_name\":\"Chroma 
Search\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"search_type\":null,\"collection_name\":null,\"embedding\":null,\"chroma_server_ssl_enabled\":null,\"index_directory\":null,\"chroma_server_cors_allow_origins\":null,\"chroma_server_host\":null,\"chroma_server_port\":null,\"chroma_server_grpc_port\":null},\"output_types\":[\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"FAISS\":{\"template\":{\"documents\":{\"type\":\"Document\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"embedding\":{\"type\":\"Embeddings\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Union\\n\\nfrom langchain.schema import BaseRetriever\\nfrom langchain_community.vectorstores import VectorStore\\nfrom langchain_community.vectorstores.faiss import FAISS\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Document, Embeddings\\n\\n\\nclass FAISSComponent(CustomComponent):\\n display_name = \\\"FAISS\\\"\\n description = \\\"Construct FAISS wrapper from raw documents.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss\\\"\\n\\n def build_config(self):\\n return {\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n }\\n\\n def build(\\n self,\\n embedding: Embeddings,\\n documents: List[Document],\\n ) 
-> Union[VectorStore, FAISS, BaseRetriever]:\\n return FAISS.from_documents(documents=documents, embedding=embedding)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Construct FAISS wrapper from raw documents.\",\"base_classes\":[\"Runnable\",\"FAISS\",\"Generic\",\"VectorStore\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseRetriever\"],\"display_name\":\"FAISS\",\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss\",\"custom_fields\":{\"embedding\":null,\"documents\":null},\"output_types\":[\"VectorStore\",\"FAISS\",\"BaseRetriever\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"models\":{\"LlamaCppModel\":{\"template\":{\"metadata\":{\"type\":\"Dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"Dict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".bin\"],\"file_path\":\"\",\"password\":false,\"name\":\"model_path\",\"display_name\":\"Model 
Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache\",\"display_name\":\"Cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"client\",\"display_name\":\"Client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Any, Dict, List, Optional\\n\\nfrom langchain_community.llms.llamacpp import LlamaCpp\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass LlamaCppComponent(CustomComponent):\\n display_name = \\\"LlamaCppModel\\\"\\n description = \\\"Generate text using llama.cpp model.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\\\"\\n\\n def build_config(self):\\n return {\\n \\\"grammar\\\": {\\\"display_name\\\": \\\"Grammar\\\", \\\"advanced\\\": True},\\n \\\"cache\\\": {\\\"display_name\\\": \\\"Cache\\\", \\\"advanced\\\": True},\\n \\\"client\\\": {\\\"display_name\\\": \\\"Client\\\", \\\"advanced\\\": True},\\n \\\"echo\\\": {\\\"display_name\\\": \\\"Echo\\\", \\\"advanced\\\": True},\\n \\\"f16_kv\\\": {\\\"display_name\\\": \\\"F16 KV\\\", \\\"advanced\\\": True},\\n \\\"grammar_path\\\": {\\\"display_name\\\": \\\"Grammar Path\\\", \\\"advanced\\\": True},\\n \\\"last_n_tokens_size\\\": {\\n \\\"display_name\\\": \\\"Last N Tokens Size\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"logits_all\\\": {\\\"display_name\\\": \\\"Logits All\\\", 
\\\"advanced\\\": True},\\n \\\"logprobs\\\": {\\\"display_name\\\": \\\"Logprobs\\\", \\\"advanced\\\": True},\\n \\\"lora_base\\\": {\\\"display_name\\\": \\\"Lora Base\\\", \\\"advanced\\\": True},\\n \\\"lora_path\\\": {\\\"display_name\\\": \\\"Lora Path\\\", \\\"advanced\\\": True},\\n \\\"max_tokens\\\": {\\\"display_name\\\": \\\"Max Tokens\\\", \\\"advanced\\\": True},\\n \\\"metadata\\\": {\\\"display_name\\\": \\\"Metadata\\\", \\\"advanced\\\": True},\\n \\\"model_kwargs\\\": {\\\"display_name\\\": \\\"Model Kwargs\\\", \\\"advanced\\\": True},\\n \\\"model_path\\\": {\\n \\\"display_name\\\": \\\"Model Path\\\",\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".bin\\\"],\\n \\\"required\\\": True,\\n },\\n \\\"n_batch\\\": {\\\"display_name\\\": \\\"N Batch\\\", \\\"advanced\\\": True},\\n \\\"n_ctx\\\": {\\\"display_name\\\": \\\"N Ctx\\\", \\\"advanced\\\": True},\\n \\\"n_gpu_layers\\\": {\\\"display_name\\\": \\\"N GPU Layers\\\", \\\"advanced\\\": True},\\n \\\"n_parts\\\": {\\\"display_name\\\": \\\"N Parts\\\", \\\"advanced\\\": True},\\n \\\"n_threads\\\": {\\\"display_name\\\": \\\"N Threads\\\", \\\"advanced\\\": True},\\n \\\"repeat_penalty\\\": {\\\"display_name\\\": \\\"Repeat Penalty\\\", \\\"advanced\\\": True},\\n \\\"rope_freq_base\\\": {\\\"display_name\\\": \\\"Rope Freq Base\\\", \\\"advanced\\\": True},\\n \\\"rope_freq_scale\\\": {\\\"display_name\\\": \\\"Rope Freq Scale\\\", \\\"advanced\\\": True},\\n \\\"seed\\\": {\\\"display_name\\\": \\\"Seed\\\", \\\"advanced\\\": True},\\n \\\"stop\\\": {\\\"display_name\\\": \\\"Stop\\\", \\\"advanced\\\": True},\\n \\\"streaming\\\": {\\\"display_name\\\": \\\"Streaming\\\", \\\"advanced\\\": True},\\n \\\"suffix\\\": {\\\"display_name\\\": \\\"Suffix\\\", \\\"advanced\\\": True},\\n \\\"tags\\\": {\\\"display_name\\\": \\\"Tags\\\", \\\"advanced\\\": True},\\n \\\"temperature\\\": {\\\"display_name\\\": \\\"Temperature\\\"},\\n \\\"top_k\\\": {\\\"display_name\\\": 
\\\"Top K\\\", \\\"advanced\\\": True},\\n \\\"top_p\\\": {\\\"display_name\\\": \\\"Top P\\\", \\\"advanced\\\": True},\\n \\\"use_mlock\\\": {\\\"display_name\\\": \\\"Use Mlock\\\", \\\"advanced\\\": True},\\n \\\"use_mmap\\\": {\\\"display_name\\\": \\\"Use Mmap\\\", \\\"advanced\\\": True},\\n \\\"verbose\\\": {\\\"display_name\\\": \\\"Verbose\\\", \\\"advanced\\\": True},\\n \\\"vocab_only\\\": {\\\"display_name\\\": \\\"Vocab Only\\\", \\\"advanced\\\": True},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n model_path: str,\\n inputs: str,\\n grammar: Optional[str] = None,\\n cache: Optional[bool] = None,\\n client: Optional[Any] = None,\\n echo: Optional[bool] = False,\\n f16_kv: bool = True,\\n grammar_path: Optional[str] = None,\\n last_n_tokens_size: Optional[int] = 64,\\n logits_all: bool = False,\\n logprobs: Optional[int] = None,\\n lora_base: Optional[str] = None,\\n lora_path: Optional[str] = None,\\n max_tokens: Optional[int] = 256,\\n metadata: Optional[Dict] = None,\\n model_kwargs: Dict = {},\\n n_batch: Optional[int] = 8,\\n n_ctx: int = 512,\\n n_gpu_layers: Optional[int] = 1,\\n n_parts: int = -1,\\n n_threads: Optional[int] = 1,\\n repeat_penalty: Optional[float] = 1.1,\\n rope_freq_base: float = 10000.0,\\n rope_freq_scale: float = 1.0,\\n seed: int = -1,\\n stop: Optional[List[str]] = [],\\n streaming: bool = True,\\n suffix: Optional[str] = \\\"\\\",\\n tags: Optional[List[str]] = [],\\n temperature: Optional[float] = 0.8,\\n top_k: Optional[int] = 40,\\n top_p: Optional[float] = 0.95,\\n use_mlock: bool = False,\\n use_mmap: Optional[bool] = True,\\n verbose: bool = True,\\n vocab_only: bool = False,\\n ) -> Text:\\n output = LlamaCpp(\\n model_path=model_path,\\n grammar=grammar,\\n cache=cache,\\n client=client,\\n echo=echo,\\n f16_kv=f16_kv,\\n grammar_path=grammar_path,\\n last_n_tokens_size=last_n_tokens_size,\\n logits_all=logits_all,\\n logprobs=logprobs,\\n lora_base=lora_base,\\n 
lora_path=lora_path,\\n max_tokens=max_tokens,\\n metadata=metadata,\\n model_kwargs=model_kwargs,\\n n_batch=n_batch,\\n n_ctx=n_ctx,\\n n_gpu_layers=n_gpu_layers,\\n n_parts=n_parts,\\n n_threads=n_threads,\\n repeat_penalty=repeat_penalty,\\n rope_freq_base=rope_freq_base,\\n rope_freq_scale=rope_freq_scale,\\n seed=seed,\\n stop=stop,\\n streaming=streaming,\\n suffix=suffix,\\n tags=tags,\\n temperature=temperature,\\n top_k=top_k,\\n top_p=top_p,\\n use_mlock=use_mlock,\\n use_mmap=use_mmap,\\n verbose=verbose,\\n vocab_only=vocab_only,\\n )\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"echo\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"echo\",\"display_name\":\"Echo\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"f16_kv\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"f16_kv\",\"display_name\":\"F16 
KV\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"grammar\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"grammar\",\"display_name\":\"Grammar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"grammar_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"grammar_path\",\"display_name\":\"Grammar Path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"last_n_tokens_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":64,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"last_n_tokens_size\",\"display_name\":\"Last N Tokens Size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"logits_all\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"logits_all\",\"display_name\":\"Logits 
All\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"logprobs\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"logprobs\",\"display_name\":\"Logprobs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"lora_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"lora_base\",\"display_name\":\"Lora Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"lora_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"lora_path\",\"display_name\":\"Lora Path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_batch\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_batch\",\"display_name\":\"N Batch\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_ctx\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":512,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_ctx\",\"display_name\":\"N 
Ctx\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_gpu_layers\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_gpu_layers\",\"display_name\":\"N GPU Layers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_parts\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":-1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_parts\",\"display_name\":\"N Parts\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_threads\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_threads\",\"display_name\":\"N Threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"repeat_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_penalty\",\"display_name\":\"Repeat Penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"rope_freq_base\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10000.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"rope_freq_base\",\"display_name\":\"Rope Freq 
Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"rope_freq_scale\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"rope_freq_scale\",\"display_name\":\"Rope Freq Scale\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"seed\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":-1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"seed\",\"display_name\":\"Seed\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"stop\",\"display_name\":\"Stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"streaming\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"suffix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"suffix\",\"display_name\":\"Suffix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",
\"display_name\":\"Tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"use_mlock\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_mlock\",\"display_name\":\"Use Mlock\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"use_mmap\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_mmap\",\"display_name\":\"Use 
Mmap\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"vocab_only\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vocab_only\",\"display_name\":\"Vocab Only\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using llama.cpp model.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"LlamaCppModel\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\",\"custom_fields\":{\"model_path\":null,\"inputs\":null,\"grammar\":null,\"cache\":null,\"client\":null,\"echo\":null,\"f16_kv\":null,\"grammar_path\":null,\"last_n_tokens_size\":null,\"logits_all\":null,\"logprobs\":null,\"lora_base\":null,\"lora_path\":null,\"max_tokens\":null,\"metadata\":null,\"model_kwargs\":null,\"n_batch\":null,\"n_ctx\":null,\"n_gpu_layers\":null,\"n_parts\":null,\"n_threads\":null,\"repeat_penalty\":null,\"rope_freq_base\":null,\"rope_freq_scale\":null,\"seed\":null,\"stop\":null,\"streaming\":null,\"suffix\":null,\"tags\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"use_mlock\":null,\"use_mmap\":null,\"verbose\":null,\"vocab_only\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"BaiduQianfanChatModel\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom 
langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass QianfanChatEndpointComponent(CustomComponent):\\n display_name: str = \\\"QianfanChat Model\\\"\\n description: str = (\\n \\\"Generate text using Baidu Qianfan chat models. Get more detail from \\\"\\n \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n 
\\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n inputs: str,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> Text:\\n try:\\n output = QianfanChatEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=SecretStr(qianfan_ak) if qianfan_ak else None,\\n qianfan_sk=SecretStr(qianfan_sk) if qianfan_sk else None,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model 
used.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"title_case\":false,\"input_types\":[\"Text\"]},\"penalty_score\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"qianfan_ak\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from 
https://cloud.baidu.com/product/wenxinworkshop\",\"title_case\":false,\"input_types\":[\"Text\"]},\"qianfan_sk\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using Baidu Qianfan chat models. 
Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"QianfanChat Model\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"model\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"top_p\":null,\"temperature\":null,\"penalty_score\":null,\"endpoint\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"GoogleGenerativeAIModel\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_google_genai import ChatGoogleGenerativeAI # type: ignore\\nfrom pydantic.v1.types import SecretStr\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import RangeSpec, Text\\n\\n\\nclass GoogleGenerativeAIComponent(CustomComponent):\\n display_name: str = \\\"Google Generative AIModel\\\"\\n description: str = \\\"Generate text using Google Generative AI to generate text.\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\n \\\"google_api_key\\\": {\\n \\\"display_name\\\": \\\"Google API Key\\\",\\n \\\"info\\\": \\\"The Google API Key to use for the Google Generative AI.\\\",\\n },\\n \\\"max_output_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Output Tokens\\\",\\n \\\"info\\\": \\\"The maximum number of tokens to generate.\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"info\\\": \\\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\\\",\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"info\\\": \\\"Decode using top-k sampling: consider the set of top_k most probable tokens. 
Must be positive.\\\",\\n \\\"range_spec\\\": RangeSpec(min=0, max=2, step=0.1),\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"info\\\": \\\"The maximum cumulative probability of tokens to consider when sampling.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"n\\\": {\\n \\\"display_name\\\": \\\"N\\\",\\n \\\"info\\\": \\\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model\\\",\\n \\\"info\\\": \\\"The name of the model to use. Supported examples: gemini-pro\\\",\\n \\\"options\\\": [\\\"gemini-pro\\\", \\\"gemini-pro-vision\\\"],\\n },\\n \\\"code\\\": {\\n \\\"advanced\\\": True,\\n },\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n google_api_key: str,\\n model: str,\\n inputs: str,\\n max_output_tokens: Optional[int] = None,\\n temperature: float = 0.1,\\n top_k: Optional[int] = None,\\n top_p: Optional[float] = None,\\n n: Optional[int] = 1,\\n ) -> Text:\\n output = ChatGoogleGenerativeAI(\\n model=model,\\n max_output_tokens=max_output_tokens or None, # type: ignore\\n temperature=temperature,\\n top_k=top_k or None,\\n top_p=top_p or None, # type: ignore\\n n=n or 1,\\n google_api_key=SecretStr(google_api_key),\\n )\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"google_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"google_api_key\",\"display_name\":\"Google API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The Google API Key to use for the Google Generative AI.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_output_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_output_tokens\",\"display_name\":\"Max Output Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum number of tokens to generate.\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gemini-pro\",\"gemini-pro-vision\"],\"name\":\"model\",\"display_name\":\"Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"The name of the model to use. Supported examples: gemini-pro\",\"title_case\":false,\"input_types\":[\"Text\"]},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n\",\"display_name\":\"N\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of chat completions to generate for each prompt. 
Note that the API may not return the full n completions if duplicates are generated.\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.\",\"rangeSpec\":{\"min\":0.0,\"max\":2.0,\"step\":0.1},\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"The maximum cumulative probability of tokens to consider when sampling.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using Google Generative AI to generate text.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Google Generative 
AIModel\",\"documentation\":\"http://docs.langflow.org/components/custom\",\"custom_fields\":{\"google_api_key\":null,\"model\":null,\"inputs\":null,\"max_output_tokens\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"n\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"CTransformersModel\":{\"template\":{\"model_file\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".bin\"],\"file_path\":\"\",\"password\":false,\"name\":\"model_file\",\"display_name\":\"Model File\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Dict, Optional\\n\\nfrom langchain_community.llms.ctransformers import CTransformers\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass CTransformersComponent(CustomComponent):\\n display_name = \\\"CTransformersModel\\\"\\n description = \\\"Generate text using CTransformers LLM models\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\\"display_name\\\": \\\"Model\\\", \\\"required\\\": True},\\n \\\"model_file\\\": {\\n \\\"display_name\\\": \\\"Model File\\\",\\n \\\"required\\\": False,\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".bin\\\"],\\n },\\n \\\"model_type\\\": {\\\"display_name\\\": \\\"Model Type\\\", \\\"required\\\": True},\\n \\\"config\\\": {\\n \\\"display_name\\\": \\\"Config\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"value\\\": 
'{\\\"top_k\\\":40,\\\"top_p\\\":0.95,\\\"temperature\\\":0.8,\\\"repetition_penalty\\\":1.1,\\\"last_n_tokens\\\":64,\\\"seed\\\":-1,\\\"max_new_tokens\\\":256,\\\"stop\\\":\\\"\\\",\\\"stream\\\":\\\"False\\\",\\\"reset\\\":\\\"True\\\",\\\"batch_size\\\":8,\\\"threads\\\":-1,\\\"context_length\\\":-1,\\\"gpu_layers\\\":0}',\\n },\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n model: str,\\n model_file: str,\\n inputs: str,\\n model_type: str,\\n config: Optional[Dict] = None,\\n ) -> Text:\\n output = CTransformers(model=model, model_file=model_file, model_type=model_type, config=config)\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"config\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{\\\"top_k\\\":40,\\\"top_p\\\":0.95,\\\"temperature\\\":0.8,\\\"repetition_penalty\\\":1.1,\\\"last_n_tokens\\\":64,\\\"seed\\\":-1,\\\"max_new_tokens\\\":256,\\\"stop\\\":\\\"\\\",\\\"stream\\\":\\\"False\\\",\\\"reset\\\":\\\"True\\\",\\\"batch_size\\\":8,\\\"threads\\\":-1,\\\"context_length\\\":-1,\\\"gpu_layers\\\":0}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"config\",\"display_name\":\"Config\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"mu
ltiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_type\",\"display_name\":\"Model Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using CTransformers LLM models\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"CTransformersModel\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\",\"custom_fields\":{\"model\":null,\"model_file\":null,\"inputs\":null,\"model_type\":null,\"config\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VertexAiModel\":{\"template\":{\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"display_name\":\"Credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"examples\":{\"type\":\"BaseMessage\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":true,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"examples\",\"display_name\":\"Examples\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langchain_core.messages.base import BaseMessage\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass 
ChatVertexAIComponent(CustomComponent):\\n display_name = \\\"ChatVertexAIModel\\\"\\n description = \\\"Generate text using Vertex AI Chat large language models API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"credentials\\\": {\\n \\\"display_name\\\": \\\"Credentials\\\",\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".json\\\"],\\n \\\"file_path\\\": None,\\n },\\n \\\"examples\\\": {\\n \\\"display_name\\\": \\\"Examples\\\",\\n \\\"multiline\\\": True,\\n },\\n \\\"location\\\": {\\n \\\"display_name\\\": \\\"Location\\\",\\n \\\"value\\\": \\\"us-central1\\\",\\n },\\n \\\"max_output_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Output Tokens\\\",\\n \\\"value\\\": 128,\\n \\\"advanced\\\": True,\\n },\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": \\\"chat-bison\\\",\\n },\\n \\\"project\\\": {\\n \\\"display_name\\\": \\\"Project\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"value\\\": 0.0,\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"value\\\": 40,\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"value\\\": 0.95,\\n \\\"advanced\\\": True,\\n },\\n \\\"verbose\\\": {\\n \\\"display_name\\\": \\\"Verbose\\\",\\n \\\"value\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n inputs: str,\\n credentials: Optional[str],\\n project: str,\\n examples: Optional[List[BaseMessage]] = [],\\n location: str = \\\"us-central1\\\",\\n max_output_tokens: int = 128,\\n model_name: str = \\\"chat-bison\\\",\\n temperature: float = 0.0,\\n top_k: int = 40,\\n top_p: float = 0.95,\\n verbose: bool = False,\\n ) -> Text:\\n try:\\n from langchain_google_vertexai import ChatVertexAI\\n except ImportError:\\n raise ImportError(\\n \\\"To use the ChatVertexAI model, you need to install the 
langchain-google-vertexai package.\\\"\\n )\\n output = ChatVertexAI(\\n credentials=credentials,\\n examples=examples,\\n location=location,\\n max_output_tokens=max_output_tokens,\\n model_name=model_name,\\n project=project,\\n temperature=temperature,\\n top_k=top_k,\\n top_p=top_p,\\n verbose=verbose,\\n )\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"location\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"location\",\"display_name\":\"Location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_output_tokens\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_output_tokens\",\"display_name\":\"Max Output Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat-bison\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_name\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"project\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"project\",\"display_name\":\"Project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using Vertex AI Chat large language models 
API.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"ChatVertexAIModel\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"credentials\":null,\"project\":null,\"examples\":null,\"location\":null,\"max_output_tokens\":null,\"model_name\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"verbose\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"OllamaModel\":{\"template\":{\"metadata\":{\"type\":\"Dict[str, Any]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"Metadata to add to the run trace.\",\"title_case\":false},\"stop\":{\"type\":\"list\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"stop\",\"display_name\":\"Stop Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"List of tokens to signal the model to stop generating text.\",\"title_case\":false},\"tags\":{\"type\":\"list\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"display_name\":\"Tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"Tags to add to the run trace.\",\"title_case\":false},\"base_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"base_url\",\"display_name\":\"Base URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Ollama API. 
Defaults to 'http://localhost:11434' if not specified.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache\",\"display_name\":\"Cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"Enable or disable caching.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Any, Dict, List, Optional\\n\\n# from langchain_community.chat_models import ChatOllama\\nfrom langchain_community.chat_models import ChatOllama\\n\\n# from langchain.chat_models import ChatOllama\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n# whe When a callback component is added to Langflow, the comment must be uncommented.\\n# from langchain.callbacks.manager import CallbackManager\\n\\n\\nclass ChatOllamaComponent(CustomComponent):\\n display_name = \\\"ChatOllamaModel\\\"\\n description = \\\"Generate text using Local LLM for chat with Ollama.\\\"\\n\\n def build_config(self) -> dict:\\n return {\\n \\\"base_url\\\": {\\n \\\"display_name\\\": \\\"Base URL\\\",\\n \\\"info\\\": \\\"Endpoint of the Ollama API. 
Defaults to 'http://localhost:11434' if not specified.\\\",\\n },\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": \\\"llama2\\\",\\n \\\"info\\\": \\\"Refer to https://ollama.ai/library for more models.\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.8,\\n \\\"info\\\": \\\"Controls the creativity of model responses.\\\",\\n },\\n \\\"cache\\\": {\\n \\\"display_name\\\": \\\"Cache\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"info\\\": \\\"Enable or disable caching.\\\",\\n \\\"advanced\\\": True,\\n \\\"value\\\": False,\\n },\\n ### When a callback component is added to Langflow, the comment must be uncommented. ###\\n # \\\"callback_manager\\\": {\\n # \\\"display_name\\\": \\\"Callback Manager\\\",\\n # \\\"info\\\": \\\"Optional callback manager for additional functionality.\\\",\\n # \\\"advanced\\\": True,\\n # },\\n # \\\"callbacks\\\": {\\n # \\\"display_name\\\": \\\"Callbacks\\\",\\n # \\\"info\\\": \\\"Callbacks to execute during model runtime.\\\",\\n # \\\"advanced\\\": True,\\n # },\\n ########################################################################################\\n \\\"format\\\": {\\n \\\"display_name\\\": \\\"Format\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"Specify the format of the output (e.g., json).\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"metadata\\\": {\\n \\\"display_name\\\": \\\"Metadata\\\",\\n \\\"info\\\": \\\"Metadata to add to the run trace.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat\\\": {\\n \\\"display_name\\\": \\\"Mirostat\\\",\\n \\\"options\\\": [\\\"Disabled\\\", \\\"Mirostat\\\", \\\"Mirostat 2.0\\\"],\\n \\\"info\\\": \\\"Enable/disable Mirostat sampling for controlling perplexity.\\\",\\n \\\"value\\\": \\\"Disabled\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat_eta\\\": {\\n \\\"display_name\\\": \\\"Mirostat Eta\\\",\\n \\\"field_type\\\": 
\\\"float\\\",\\n \\\"info\\\": \\\"Learning rate for Mirostat algorithm. (Default: 0.1)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat_tau\\\": {\\n \\\"display_name\\\": \\\"Mirostat Tau\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Controls the balance between coherence and diversity of the output. (Default: 5.0)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_ctx\\\": {\\n \\\"display_name\\\": \\\"Context Window Size\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Size of the context window for generating tokens. (Default: 2048)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_gpu\\\": {\\n \\\"display_name\\\": \\\"Number of GPUs\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_thread\\\": {\\n \\\"display_name\\\": \\\"Number of Threads\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Number of threads to use during computation. (Default: detected for optimal performance)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"repeat_last_n\\\": {\\n \\\"display_name\\\": \\\"Repeat Last N\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"repeat_penalty\\\": {\\n \\\"display_name\\\": \\\"Repeat Penalty\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Penalty for repetitions in generated text. (Default: 1.1)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"tfs_z\\\": {\\n \\\"display_name\\\": \\\"TFS Z\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Tail free sampling value. 
(Default: 1)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"timeout\\\": {\\n \\\"display_name\\\": \\\"Timeout\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Timeout for the request stream.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Limits token selection to top K. (Default: 40)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Works together with top-k. (Default: 0.9)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"verbose\\\": {\\n \\\"display_name\\\": \\\"Verbose\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"info\\\": \\\"Whether to print out response text.\\\",\\n },\\n \\\"tags\\\": {\\n \\\"display_name\\\": \\\"Tags\\\",\\n \\\"field_type\\\": \\\"list\\\",\\n \\\"info\\\": \\\"Tags to add to the run trace.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"stop\\\": {\\n \\\"display_name\\\": \\\"Stop Tokens\\\",\\n \\\"field_type\\\": \\\"list\\\",\\n \\\"info\\\": \\\"List of tokens to signal the model to stop generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"system\\\": {\\n \\\"display_name\\\": \\\"System\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"System to use for generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"template\\\": {\\n \\\"display_name\\\": \\\"Template\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"Template to use for generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n base_url: Optional[str],\\n model: str,\\n inputs: str,\\n mirostat: Optional[str],\\n mirostat_eta: Optional[float] = None,\\n mirostat_tau: Optional[float] = None,\\n ### When a callback component is added to Langflow, the comment must be uncommented.###\\n # callback_manager: 
Optional[CallbackManager] = None,\\n # callbacks: Optional[List[Callbacks]] = None,\\n #######################################################################################\\n repeat_last_n: Optional[int] = None,\\n verbose: Optional[bool] = None,\\n cache: Optional[bool] = None,\\n num_ctx: Optional[int] = None,\\n num_gpu: Optional[int] = None,\\n format: Optional[str] = None,\\n metadata: Optional[Dict[str, Any]] = None,\\n num_thread: Optional[int] = None,\\n repeat_penalty: Optional[float] = None,\\n stop: Optional[List[str]] = None,\\n system: Optional[str] = None,\\n tags: Optional[List[str]] = None,\\n temperature: Optional[float] = None,\\n template: Optional[str] = None,\\n tfs_z: Optional[float] = None,\\n timeout: Optional[int] = None,\\n top_k: Optional[int] = None,\\n top_p: Optional[int] = None,\\n ) -> Text:\\n if not base_url:\\n base_url = \\\"http://localhost:11434\\\"\\n\\n # Mapping mirostat settings to their corresponding values\\n mirostat_options = {\\\"Mirostat\\\": 1, \\\"Mirostat 2.0\\\": 2}\\n\\n # Default to 0 for 'Disabled'\\n mirostat_value = mirostat_options.get(mirostat, 0) # type: ignore\\n\\n # Set mirostat_eta and mirostat_tau to None if mirostat is disabled\\n if mirostat_value == 0:\\n mirostat_eta = None\\n mirostat_tau = None\\n\\n # Mapping system settings to their corresponding values\\n llm_params = {\\n \\\"base_url\\\": base_url,\\n \\\"cache\\\": cache,\\n \\\"model\\\": model,\\n \\\"mirostat\\\": mirostat_value,\\n \\\"format\\\": format,\\n \\\"metadata\\\": metadata,\\n \\\"tags\\\": tags,\\n ## When a callback component is added to Langflow, the comment must be uncommented.##\\n # \\\"callback_manager\\\": callback_manager,\\n # \\\"callbacks\\\": callbacks,\\n #####################################################################################\\n \\\"mirostat_eta\\\": mirostat_eta,\\n \\\"mirostat_tau\\\": mirostat_tau,\\n \\\"num_ctx\\\": num_ctx,\\n \\\"num_gpu\\\": num_gpu,\\n \\\"num_thread\\\": 
num_thread,\\n \\\"repeat_last_n\\\": repeat_last_n,\\n \\\"repeat_penalty\\\": repeat_penalty,\\n \\\"temperature\\\": temperature,\\n \\\"stop\\\": stop,\\n \\\"system\\\": system,\\n \\\"template\\\": template,\\n \\\"tfs_z\\\": tfs_z,\\n \\\"timeout\\\": timeout,\\n \\\"top_k\\\": top_k,\\n \\\"top_p\\\": top_p,\\n \\\"verbose\\\": verbose,\\n }\\n\\n # None Value remove\\n llm_params = {k: v for k, v in llm_params.items() if v is not None}\\n\\n try:\\n output = ChatOllama(**llm_params) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not initialize Ollama LLM.\\\") from e\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"format\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"format\",\"display_name\":\"Format\",\"advanced\":true,\"dynamic\":false,\"info\":\"Specify the format of the output (e.g., json).\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"mirostat\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Disabled\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Disabled\",\"Mirostat\",\"Mirostat 2.0\"],\"name\":\"mirostat\",\"display_name\":\"Mirostat\",\"advanced\":true,\"dynamic\":false,\"info\":\"Enable/disable Mirostat sampling for controlling 
perplexity.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"mirostat_eta\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mirostat_eta\",\"display_name\":\"Mirostat Eta\",\"advanced\":true,\"dynamic\":false,\"info\":\"Learning rate for Mirostat algorithm. (Default: 0.1)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"mirostat_tau\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mirostat_tau\",\"display_name\":\"Mirostat Tau\",\"advanced\":true,\"dynamic\":false,\"info\":\"Controls the balance between coherence and diversity of the output. (Default: 5.0)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"llama2\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"Refer to https://ollama.ai/library for more models.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"num_ctx\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_ctx\",\"display_name\":\"Context Window Size\",\"advanced\":true,\"dynamic\":false,\"info\":\"Size of the context window for generating tokens. 
(Default: 2048)\",\"title_case\":false},\"num_gpu\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_gpu\",\"display_name\":\"Number of GPUs\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)\",\"title_case\":false},\"num_thread\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_thread\",\"display_name\":\"Number of Threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of threads to use during computation. (Default: detected for optimal performance)\",\"title_case\":false},\"repeat_last_n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_last_n\",\"display_name\":\"Repeat Last N\",\"advanced\":true,\"dynamic\":false,\"info\":\"How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)\",\"title_case\":false},\"repeat_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_penalty\",\"display_name\":\"Repeat Penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"Penalty for repetitions in generated text. 
(Default: 1.1)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"system\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"system\",\"display_name\":\"System\",\"advanced\":true,\"dynamic\":false,\"info\":\"System to use for generating text.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Controls the creativity of model responses.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"template\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"template\",\"display_name\":\"Template\",\"advanced\":true,\"dynamic\":false,\"info\":\"Template to use for generating text.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"tfs_z\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tfs_z\",\"display_name\":\"TFS Z\",\"advanced\":true,\"dynamic\":false,\"info\":\"Tail free sampling value. 
(Default: 1)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"timeout\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"Timeout for the request stream.\",\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"Limits token selection to top K. (Default: 40)\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"Works together with top-k. 
(Default: 0.9)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"Whether to print out response text.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using Local LLM for chat with Ollama.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"ChatOllamaModel\",\"documentation\":\"\",\"custom_fields\":{\"base_url\":null,\"model\":null,\"inputs\":null,\"mirostat\":null,\"mirostat_eta\":null,\"mirostat_tau\":null,\"repeat_last_n\":null,\"verbose\":null,\"cache\":null,\"num_ctx\":null,\"num_gpu\":null,\"format\":null,\"metadata\":null,\"num_thread\":null,\"repeat_penalty\":null,\"stop\":null,\"system\":null,\"tags\":null,\"temperature\":null,\"template\":null,\"tfs_z\":null,\"timeout\":null,\"top_k\":null,\"top_p\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AnthropicModel\":{\"template\":{\"anthropic_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"anthropic_api_key\",\"display_name\":\"Anthropic API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"Your Anthropic API key.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"api_endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"api_endpoint\",\"display_name\":\"API Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.chat_models.anthropic import ChatAnthropic\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass AnthropicLLM(CustomComponent):\\n display_name: str = \\\"AnthropicModel\\\"\\n description: str = \\\"Generate text using Anthropic Chat&Completion large language models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"claude-2.1\\\",\\n \\\"claude-2.0\\\",\\n \\\"claude-instant-1.2\\\",\\n \\\"claude-instant-1\\\",\\n # Add more models as needed\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/anthropic\\\",\\n \\\"required\\\": True,\\n \\\"value\\\": \\\"claude-2.1\\\",\\n },\\n \\\"anthropic_api_key\\\": {\\n \\\"display_name\\\": \\\"Anthropic API Key\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"Your Anthropic API key.\\\",\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 256,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.7,\\n },\\n \\\"api_endpoint\\\": {\\n \\\"display_name\\\": \\\"API Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n model: str,\\n inputs: str,\\n anthropic_api_key: Optional[str] = None,\\n max_tokens: Optional[int] = None,\\n temperature: Optional[float] = None,\\n api_endpoint: Optional[str] = None,\\n ) -> Text:\\n # Set default API endpoint if not provided\\n if not api_endpoint:\\n api_endpoint = \\\"https://api.anthropic.com\\\"\\n\\n try:\\n output = ChatAnthropic(\\n model_name=model,\\n anthropic_api_key=(SecretStr(anthropic_api_key) if anthropic_api_key else None),\\n max_tokens_to_sample=max_tokens, # type: ignore\\n temperature=temperature,\\n anthropic_api_url=api_endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Anthropic API.\\\") from e\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max 
Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"claude-2.1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"claude-2.1\",\"claude-2.0\",\"claude-instant-1.2\",\"claude-instant-1\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/anthropic\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using Anthropic Chat&Completion large language models.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"AnthropicModel\",\"documentation\":\"\",\"custom_fields\":{\"model\":null,\"inputs\":null,\"anthropic_api_key\":null,\"max_tokens\":null,\"temperature\":null,\"api_endpoint\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"OpenAIModel\":{\"template\":{\"inputs\":{\"type\":\"Text\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_openai import ChatOpenAI\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import 
NestedDict, Text\\n\\n\\nclass OpenAIModelComponent(CustomComponent):\\n display_name = \\\"OpenAI Model\\\"\\n description = \\\"Generates text using OpenAI's models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n },\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Kwargs\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n },\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"options\\\": [\\n \\\"gpt-4-turbo-preview\\\",\\n \\\"gpt-4-0125-preview\\\",\\n \\\"gpt-4-1106-preview\\\",\\n \\\"gpt-4-vision-preview\\\",\\n \\\"gpt-3.5-turbo-0125\\\",\\n \\\"gpt-3.5-turbo-1106\\\",\\n ],\\n },\\n \\\"openai_api_base\\\": {\\n \\\"display_name\\\": \\\"OpenAI API Base\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"info\\\": (\\n \\\"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\\\n\\\\n\\\"\\n \\\"You can change this to use other APIs like JinaChat, LocalAI and Prem.\\\"\\n ),\\n },\\n \\\"openai_api_key\\\": {\\n \\\"display_name\\\": \\\"OpenAI API Key\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"password\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"value\\\": 0.7,\\n },\\n }\\n\\n def build(\\n self,\\n inputs: Text,\\n max_tokens: Optional[int] = 256,\\n model_kwargs: NestedDict = {},\\n model_name: str = \\\"gpt-4-1106-preview\\\",\\n openai_api_base: Optional[str] = None,\\n openai_api_key: Optional[str] = None,\\n temperature: float = 0.7,\\n ) -> Text:\\n if not openai_api_base:\\n openai_api_base = \\\"https://api.openai.com/v1\\\"\\n model = ChatOpenAI(\\n max_tokens=max_tokens,\\n model_kwargs=model_kwargs,\\n model=model_name,\\n base_url=openai_api_base,\\n api_key=openai_api_key,\\n temperature=temperature,\\n )\\n\\n message = model.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model 
Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gpt-4-turbo-preview\",\"gpt-4-0125-preview\",\"gpt-4-1106-preview\",\"gpt-4-vision-preview\",\"gpt-3.5-turbo-0125\",\"gpt-3.5-turbo-1106\"],\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generates text using OpenAI's models.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"OpenAI 
Model\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"max_tokens\":null,\"model_kwargs\":null,\"model_name\":null,\"openai_api_base\":null,\"openai_api_key\":null,\"temperature\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"HuggingFaceModel\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.chat_models.huggingface import ChatHuggingFace\\nfrom langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint\\n\\nfrom langflow import CustomComponent\\n\\nfrom langflow.field_typing import Text\\n\\n\\nclass HuggingFaceEndpointsComponent(CustomComponent):\\n display_name: str = \\\"Hugging Face Inference API models\\\"\\n description: str = \\\"Generate text using LLM model from Hugging Face Inference API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Endpoint URL\\\", \\\"password\\\": True},\\n \\\"task\\\": {\\n \\\"display_name\\\": \\\"Task\\\",\\n \\\"options\\\": [\\\"text2text-generation\\\", \\\"text-generation\\\", \\\"summarization\\\"],\\n },\\n \\\"huggingfacehub_api_token\\\": {\\\"display_name\\\": \\\"API token\\\", \\\"password\\\": True},\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Keyword Arguments\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n inputs: str,\\n endpoint_url: str,\\n task: str = \\\"text2text-generation\\\",\\n huggingfacehub_api_token: Optional[str] = None,\\n model_kwargs: Optional[dict] = None,\\n ) -> Text:\\n try:\\n llm = HuggingFaceEndpoint(\\n endpoint_url=endpoint_url,\\n task=task,\\n huggingfacehub_api_token=huggingfacehub_api_token,\\n model_kwargs=model_kwargs,\\n )\\n except Exception as e:\\n raise 
ValueError(\\\"Could not connect to HuggingFace Endpoints API.\\\") from e\\n output = ChatHuggingFace(llm=llm)\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"endpoint_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"endpoint_url\",\"display_name\":\"Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"huggingfacehub_api_token\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"API token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_kwargs\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Keyword 
Arguments\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"task\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"text2text-generation\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"text2text-generation\",\"text-generation\",\"summarization\"],\"name\":\"task\",\"display_name\":\"Task\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using LLM model from Hugging Face Inference API.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Hugging Face Inference API models\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"endpoint_url\":null,\"task\":null,\"huggingfacehub_api_token\":null,\"model_kwargs\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AzureOpenAIModel\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"api_version\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"2023-12-01-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"2023-03-15-preview\",\"2023-05-15\",\"2023-06-01-preview\",\"2023-07-01-preview\",\"2023-08-01-preview\",\"2023-09-01-preview\",\"2023-12-01-preview\"],\"name\":\"api_version\",\"display_name\":\"API 
Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"azure_deployment\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"azure_deployment\",\"display_name\":\"Deployment Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"azure_endpoint\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"azure_endpoint\",\"display_name\":\"Azure Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Your Azure endpoint, including the resource.. Example: `https://example-resource.azure.openai.com/`\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.llms.base import BaseLanguageModel\\nfrom langchain_openai import AzureChatOpenAI\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AzureChatOpenAIComponent(CustomComponent):\\n display_name: str = \\\"AzureOpenAI Model\\\"\\n description: str = \\\"Generate text using LLM model from Azure OpenAI.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/integrations/llms/azure_openai\\\"\\n beta = False\\n\\n AZURE_OPENAI_MODELS = [\\n \\\"gpt-35-turbo\\\",\\n \\\"gpt-35-turbo-16k\\\",\\n \\\"gpt-35-turbo-instruct\\\",\\n \\\"gpt-4\\\",\\n \\\"gpt-4-32k\\\",\\n \\\"gpt-4-vision\\\",\\n ]\\n\\n AZURE_OPENAI_API_VERSIONS = [\\n \\\"2023-03-15-preview\\\",\\n \\\"2023-05-15\\\",\\n \\\"2023-06-01-preview\\\",\\n \\\"2023-07-01-preview\\\",\\n \\\"2023-08-01-preview\\\",\\n \\\"2023-09-01-preview\\\",\\n \\\"2023-12-01-preview\\\",\\n ]\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": 
{\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": self.AZURE_OPENAI_MODELS[0],\\n \\\"options\\\": self.AZURE_OPENAI_MODELS,\\n \\\"required\\\": True,\\n },\\n \\\"azure_endpoint\\\": {\\n \\\"display_name\\\": \\\"Azure Endpoint\\\",\\n \\\"required\\\": True,\\n \\\"info\\\": \\\"Your Azure endpoint, including the resource.. Example: `https://example-resource.azure.openai.com/`\\\",\\n },\\n \\\"azure_deployment\\\": {\\n \\\"display_name\\\": \\\"Deployment Name\\\",\\n \\\"required\\\": True,\\n },\\n \\\"api_version\\\": {\\n \\\"display_name\\\": \\\"API Version\\\",\\n \\\"options\\\": self.AZURE_OPENAI_API_VERSIONS,\\n \\\"value\\\": self.AZURE_OPENAI_API_VERSIONS[-1],\\n \\\"required\\\": True,\\n \\\"advanced\\\": True,\\n },\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"required\\\": True, \\\"password\\\": True},\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"value\\\": 0.7,\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"required\\\": False,\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"value\\\": 1000,\\n \\\"required\\\": False,\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"advanced\\\": True,\\n \\\"info\\\": \\\"Maximum number of tokens to generate.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n model: str,\\n azure_endpoint: str,\\n inputs: str,\\n azure_deployment: str,\\n api_key: str,\\n api_version: str,\\n temperature: float = 0.7,\\n max_tokens: Optional[int] = 1000,\\n ) -> BaseLanguageModel:\\n try:\\n output = AzureChatOpenAI(\\n model=model,\\n azure_endpoint=azure_endpoint,\\n azure_deployment=azure_deployment,\\n api_version=api_version,\\n api_key=api_key,\\n temperature=temperature,\\n max_tokens=max_tokens,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AzureOpenAI API.\\\") from e\\n message = 
output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"Maximum number of tokens to generate.\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-35-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gpt-35-turbo\",\"gpt-35-turbo-16k\",\"gpt-35-turbo-instruct\",\"gpt-4\",\"gpt-4-32k\",\"gpt-4-vision\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using LLM model from Azure 
OpenAI.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\"],\"display_name\":\"AzureOpenAI Model\",\"documentation\":\"https://python.langchain.com/docs/integrations/llms/azure_openai\",\"custom_fields\":{\"model\":null,\"azure_endpoint\":null,\"inputs\":null,\"azure_deployment\":null,\"api_key\":null,\"api_version\":null,\"temperature\":null,\"max_tokens\":null},\"output_types\":[\"BaseLanguageModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"AmazonBedrockModel\":{\"template\":{\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache\",\"display_name\":\"Cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.chat_models.bedrock import BedrockChat\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass AmazonBedrockComponent(CustomComponent):\\n display_name: str = \\\"Amazon Bedrock Model\\\"\\n description: str = \\\"Generate text using LLM model from Amazon Bedrock.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\n \\\"ai21.j2-grande-instruct\\\",\\n \\\"ai21.j2-jumbo-instruct\\\",\\n \\\"ai21.j2-mid\\\",\\n \\\"ai21.j2-mid-v1\\\",\\n \\\"ai21.j2-ultra\\\",\\n \\\"ai21.j2-ultra-v1\\\",\\n \\\"anthropic.claude-instant-v1\\\",\\n \\\"anthropic.claude-v1\\\",\\n \\\"anthropic.claude-v2\\\",\\n \\\"cohere.command-text-v14\\\",\\n ],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"streaming\\\": {\\\"display_name\\\": \\\"Streaming\\\", 
\\\"field_type\\\": \\\"bool\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"Region Name\\\"},\\n \\\"model_kwargs\\\": {\\\"display_name\\\": \\\"Model Kwargs\\\"},\\n \\\"cache\\\": {\\\"display_name\\\": \\\"Cache\\\"},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n \\\"inputs\\\": {\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n inputs: str,\\n model_id: str = \\\"anthropic.claude-instant-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n region_name: Optional[str] = None,\\n model_kwargs: Optional[dict] = None,\\n endpoint_url: Optional[str] = None,\\n streaming: bool = False,\\n cache: Optional[bool] = None,\\n ) -> Text:\\n try:\\n output = BedrockChat(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n region_name=region_name,\\n model_kwargs=model_kwargs,\\n endpoint_url=endpoint_url,\\n streaming=streaming,\\n cache=cache,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"endpoint_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"anthropic.claude-instant-v1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ai21.j2-grande-instruct\",\"ai21.j2-jumbo-instruct\",\"ai21.j2-mid\",\"ai21.j2-mid-v1\",\"ai21.j2-ultra\",\"ai21.j2-ultra-v1\",\"anthropic.claude-instant-v1\",\"anthropic.claude-v1\",\"anthropic.claude-v2\",\"cohere.command-text-v14\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"region_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"region_name\",\"display_name\":\"Region 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"streaming\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using LLM model from Amazon Bedrock.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Amazon Bedrock Model\",\"documentation\":\"\",\"custom_fields\":{\"inputs\":null,\"model_id\":null,\"credentials_profile_name\":null,\"region_name\":null,\"model_kwargs\":null,\"endpoint_url\":null,\"streaming\":null,\"cache\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"CohereModel\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_community.chat_models.cohere import ChatCohere\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass CohereComponent(CustomComponent):\\n display_name = \\\"CohereModel\\\"\\n description = \\\"Generate text using Cohere large language models.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\\\"\\n\\n def build_config(self):\\n return {\\n \\\"cohere_api_key\\\": {\\n \\\"display_name\\\": \\\"Cohere API Key\\\",\\n \\\"type\\\": \\\"password\\\",\\n \\\"password\\\": True,\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"default\\\": 256,\\n \\\"type\\\": \\\"int\\\",\\n \\\"show\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"default\\\": 0.75,\\n \\\"type\\\": \\\"float\\\",\\n \\\"show\\\": True,\\n },\\n \\\"inputs\\\": 
{\\\"display_name\\\": \\\"Input\\\"},\\n }\\n\\n def build(\\n self,\\n cohere_api_key: str,\\n inputs: str,\\n max_tokens: int = 256,\\n temperature: float = 0.75,\\n ) -> Text:\\n output = ChatCohere(\\n cohere_api_key=cohere_api_key,\\n max_tokens=max_tokens,\\n temperature=temperature,\\n )\\n message = output.invoke(inputs)\\n result = message.content if hasattr(message, \\\"content\\\") else message\\n self.status = result\\n return result\\n return result\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"cohere_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"cohere_api_key\",\"display_name\":\"Cohere API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"inputs\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"inputs\",\"display_name\":\"Input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_tokens\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max 
Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.75,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Generate text using Cohere large language models.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"CohereModel\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\",\"custom_fields\":{\"cohere_api_key\":null,\"inputs\":null,\"max_tokens\":null,\"temperature\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"model_specs\":{\"AmazonBedrockSpecs\":{\"template\":{\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache\",\"display_name\":\"Cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain_community.llms.bedrock import Bedrock\\n\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AmazonBedrockComponent(CustomComponent):\\n display_name: str = \\\"Amazon Bedrock\\\"\\n description: str = \\\"LLM model from Amazon Bedrock.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model_id\\\": {\\n \\\"display_name\\\": \\\"Model Id\\\",\\n \\\"options\\\": [\\n \\\"ai21.j2-grande-instruct\\\",\\n \\\"ai21.j2-jumbo-instruct\\\",\\n \\\"ai21.j2-mid\\\",\\n 
\\\"ai21.j2-mid-v1\\\",\\n \\\"ai21.j2-ultra\\\",\\n \\\"ai21.j2-ultra-v1\\\",\\n \\\"anthropic.claude-instant-v1\\\",\\n \\\"anthropic.claude-v1\\\",\\n \\\"anthropic.claude-v2\\\",\\n \\\"cohere.command-text-v14\\\",\\n ],\\n },\\n \\\"credentials_profile_name\\\": {\\\"display_name\\\": \\\"Credentials Profile Name\\\"},\\n \\\"streaming\\\": {\\\"display_name\\\": \\\"Streaming\\\", \\\"field_type\\\": \\\"bool\\\"},\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Endpoint URL\\\"},\\n \\\"region_name\\\": {\\\"display_name\\\": \\\"Region Name\\\"},\\n \\\"model_kwargs\\\": {\\\"display_name\\\": \\\"Model Kwargs\\\"},\\n \\\"cache\\\": {\\\"display_name\\\": \\\"Cache\\\"},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n model_id: str = \\\"anthropic.claude-instant-v1\\\",\\n credentials_profile_name: Optional[str] = None,\\n region_name: Optional[str] = None,\\n model_kwargs: Optional[dict] = None,\\n endpoint_url: Optional[str] = None,\\n streaming: bool = False,\\n cache: Optional[bool] = None,\\n ) -> BaseLLM:\\n try:\\n output = Bedrock(\\n credentials_profile_name=credentials_profile_name,\\n model_id=model_id,\\n region_name=region_name,\\n model_kwargs=model_kwargs,\\n endpoint_url=endpoint_url,\\n streaming=streaming,\\n cache=cache,\\n ) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AmazonBedrock API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"credentials_profile_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"credentials_profile_name\",\"display_name\":\"Credentials Profile 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"endpoint_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint_url\",\"display_name\":\"Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_id\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"anthropic.claude-instant-v1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ai21.j2-grande-instruct\",\"ai21.j2-jumbo-instruct\",\"ai21.j2-mid\",\"ai21.j2-mid-v1\",\"ai21.j2-ultra\",\"ai21.j2-ultra-v1\",\"anthropic.claude-instant-v1\",\"anthropic.claude-v1\",\"anthropic.claude-v2\",\"cohere.command-text-v14\"],\"name\":\"model_id\",\"display_name\":\"Model Id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"region_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"region_name\",\"display_name\":\"Region 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"streaming\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"LLM model from Amazon Bedrock.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"Amazon Bedrock\",\"documentation\":\"\",\"custom_fields\":{\"model_id\":null,\"credentials_profile_name\":null,\"region_name\":null,\"model_kwargs\":null,\"endpoint_url\":null,\"streaming\":null,\"cache\":null},\"output_types\":[\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ChatVertexAISpecs\":{\"template\":{\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"display_name\":\"Credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"examples\":{\"type\":\"BaseMessage\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":true,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"examples\",\"display_name\":\"Examples\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional, Union\\n\\nfrom langchain.llms import BaseLLM\\nfrom langchain_community.chat_models.vertexai import ChatVertexAI\\nfrom langchain_core.messages.base import BaseMessage\\nfrom langflow import CustomComponent\\nfrom 
langflow.field_typing import BaseLanguageModel\\n\\n\\nclass ChatVertexAIComponent(CustomComponent):\\n display_name = \\\"ChatVertexAI\\\"\\n description = \\\"`Vertex AI` Chat large language models API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"credentials\\\": {\\n \\\"display_name\\\": \\\"Credentials\\\",\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".json\\\"],\\n \\\"file_path\\\": None,\\n },\\n \\\"examples\\\": {\\n \\\"display_name\\\": \\\"Examples\\\",\\n \\\"multiline\\\": True,\\n },\\n \\\"location\\\": {\\n \\\"display_name\\\": \\\"Location\\\",\\n \\\"value\\\": \\\"us-central1\\\",\\n },\\n \\\"max_output_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Output Tokens\\\",\\n \\\"value\\\": 128,\\n \\\"advanced\\\": True,\\n },\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": \\\"chat-bison\\\",\\n },\\n \\\"project\\\": {\\n \\\"display_name\\\": \\\"Project\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"value\\\": 0.0,\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"value\\\": 40,\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"value\\\": 0.95,\\n \\\"advanced\\\": True,\\n },\\n \\\"verbose\\\": {\\n \\\"display_name\\\": \\\"Verbose\\\",\\n \\\"value\\\": False,\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n credentials: Optional[str],\\n project: str,\\n examples: Optional[List[BaseMessage]] = [],\\n location: str = \\\"us-central1\\\",\\n max_output_tokens: int = 128,\\n model_name: str = \\\"chat-bison\\\",\\n temperature: float = 0.0,\\n top_k: int = 40,\\n top_p: float = 0.95,\\n verbose: bool = False,\\n ) -> Union[BaseLanguageModel, BaseLLM]:\\n return ChatVertexAI(\\n credentials=credentials,\\n examples=examples,\\n location=location,\\n max_output_tokens=max_output_tokens,\\n model_name=model_name,\\n project=project,\\n 
temperature=temperature,\\n top_k=top_k,\\n top_p=top_p,\\n verbose=verbose,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"location\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"location\",\"display_name\":\"Location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_output_tokens\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_output_tokens\",\"display_name\":\"Max Output Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"chat-bison\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_name\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"project\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"project\",\"display_name\":\"Project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"`Vertex AI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"ChatVertexAI\",\"documentation\":\"\",\"custom_fields\":{\"credentials\":null,\"project\":null,\"examples\":null,\"location\":null,\"max_output_tokens\":null,\"model_name\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"verbose\":null},\"output_types\":[\"BaseLanguageModel\",\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"VertexAISpecs\":{\"template\":{\"credentials\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".json\"],\"file_path\":\"\",\"password\":false,\"name\":\"credentials\",\"display_name\":\"Credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.llms import BaseLLM\\nfrom typing import Optional, Union, Callable, Dict\\nfrom langchain_community.llms.vertexai import VertexAI\\n\\n\\nclass VertexAIComponent(CustomComponent):\\n display_name = \\\"VertexAI\\\"\\n description = \\\"Google Vertex AI large language models\\\"\\n\\n def build_config(self):\\n return {\\n \\\"credentials\\\": {\\n \\\"display_name\\\": \\\"Credentials\\\",\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".json\\\"],\\n \\\"required\\\": False,\\n \\\"value\\\": None,\\n },\\n \\\"location\\\": {\\n \\\"display_name\\\": \\\"Location\\\",\\n \\\"type\\\": \\\"str\\\",\\n \\\"advanced\\\": True,\\n \\\"value\\\": \\\"us-central1\\\",\\n \\\"required\\\": False,\\n },\\n \\\"max_output_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Output Tokens\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 128,\\n \\\"required\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"max_retries\\\": 
{\\n \\\"display_name\\\": \\\"Max Retries\\\",\\n \\\"type\\\": \\\"int\\\",\\n \\\"value\\\": 6,\\n \\\"required\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"metadata\\\": {\\n \\\"display_name\\\": \\\"Metadata\\\",\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"required\\\": False,\\n \\\"default\\\": {},\\n },\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"type\\\": \\\"str\\\",\\n \\\"value\\\": \\\"text-bison\\\",\\n \\\"required\\\": False,\\n },\\n \\\"n\\\": {\\n \\\"advanced\\\": True,\\n \\\"display_name\\\": \\\"N\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 1,\\n \\\"required\\\": False,\\n },\\n \\\"project\\\": {\\n \\\"display_name\\\": \\\"Project\\\",\\n \\\"type\\\": \\\"str\\\",\\n \\\"required\\\": False,\\n \\\"default\\\": None,\\n },\\n \\\"request_parallelism\\\": {\\n \\\"display_name\\\": \\\"Request Parallelism\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 5,\\n \\\"required\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"streaming\\\": {\\n \\\"display_name\\\": \\\"Streaming\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"value\\\": False,\\n \\\"required\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.0,\\n \\\"required\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"top_k\\\": {\\\"display_name\\\": \\\"Top K\\\", \\\"type\\\": \\\"int\\\", \\\"default\\\": 40, \\\"required\\\": False, \\\"advanced\\\": True},\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.95,\\n \\\"required\\\": False,\\n \\\"advanced\\\": True,\\n },\\n \\\"tuned_model_name\\\": {\\n \\\"display_name\\\": \\\"Tuned Model Name\\\",\\n \\\"type\\\": \\\"str\\\",\\n \\\"required\\\": False,\\n \\\"value\\\": None,\\n \\\"advanced\\\": True,\\n },\\n \\\"verbose\\\": {\\n \\\"display_name\\\": 
\\\"Verbose\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"value\\\": False,\\n \\\"required\\\": False,\\n },\\n \\\"name\\\": {\\\"display_name\\\": \\\"Name\\\", \\\"field_type\\\": \\\"str\\\"},\\n }\\n\\n def build(\\n self,\\n credentials: Optional[str] = None,\\n location: str = \\\"us-central1\\\",\\n max_output_tokens: int = 128,\\n max_retries: int = 6,\\n metadata: Dict = {},\\n model_name: str = \\\"text-bison\\\",\\n n: int = 1,\\n name: Optional[str] = None,\\n project: Optional[str] = None,\\n request_parallelism: int = 5,\\n streaming: bool = False,\\n temperature: float = 0.0,\\n top_k: int = 40,\\n top_p: float = 0.95,\\n tuned_model_name: Optional[str] = None,\\n verbose: bool = False,\\n ) -> Union[BaseLLM, Callable]:\\n return VertexAI(\\n credentials=credentials,\\n location=location,\\n max_output_tokens=max_output_tokens,\\n max_retries=max_retries,\\n metadata=metadata,\\n model_name=model_name,\\n n=n,\\n name=name,\\n project=project,\\n request_parallelism=request_parallelism,\\n streaming=streaming,\\n temperature=temperature,\\n top_k=top_k,\\n top_p=top_p,\\n tuned_model_name=tuned_model_name,\\n verbose=verbose,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"location\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"location\",\"display_name\":\"Location\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_output_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":128,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_output_tokens\",\"display_name\":\"Max Output 
Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_retries\",\"display_name\":\"Max Retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"metadata\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"text-bison\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n\",\"display_name\":\"N\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"name\",\"display_name\":\"Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"project\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"project\",\"display_name\":\"Project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]
},\"request_parallelism\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"request_parallelism\",\"display_name\":\"Request Parallelism\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"tuned_model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tuned_model_name\",\"display_name\":\"Tuned Model 
Name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Google Vertex AI large language models\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\",\"Callable\"],\"display_name\":\"VertexAI\",\"documentation\":\"\",\"custom_fields\":{\"credentials\":null,\"location\":null,\"max_output_tokens\":null,\"max_retries\":null,\"metadata\":null,\"model_name\":null,\"n\":null,\"name\":null,\"project\":null,\"request_parallelism\":null,\"streaming\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"tuned_model_name\":null,\"verbose\":null},\"output_types\":[\"BaseLLM\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ChatAnthropicSpecs\":{\"template\":{\"anthropic_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"anthropic_api_key\",\"display_name\":\"Anthropic API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"anthropic_api_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"anthropic_api_url\",\"display_name\":\"Anthropic API 
URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from pydantic.v1.types import SecretStr\\nfrom langflow import CustomComponent\\nfrom typing import Optional, Union, Callable\\nfrom langflow.field_typing import BaseLanguageModel\\nfrom langchain_community.chat_models.anthropic import ChatAnthropic\\n\\n\\nclass ChatAnthropicComponent(CustomComponent):\\n display_name = \\\"ChatAnthropic\\\"\\n description = \\\"`Anthropic` chat large language models.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic\\\"\\n\\n def build_config(self):\\n return {\\n \\\"anthropic_api_key\\\": {\\n \\\"display_name\\\": \\\"Anthropic API Key\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"password\\\": True,\\n },\\n \\\"anthropic_api_url\\\": {\\n \\\"display_name\\\": \\\"Anthropic API URL\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n },\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Kwargs\\\",\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n },\\n }\\n\\n def build(\\n self,\\n anthropic_api_key: str,\\n anthropic_api_url: Optional[str] = None,\\n model_kwargs: dict = {},\\n temperature: Optional[float] = None,\\n ) -> Union[BaseLanguageModel, Callable]:\\n return ChatAnthropic(\\n anthropic_api_key=SecretStr(anthropic_api_key),\\n anthropic_api_url=anthropic_api_url,\\n model_kwargs=model_kwargs,\\n temperature=temperature,\\n 
)\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"dict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"`Anthropic` chat large language models.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"Callable\"],\"display_name\":\"ChatAnthropic\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic\",\"custom_fields\":{\"anthropic_api_key\":null,\"anthropic_api_url\":null,\"model_kwargs\":null,\"temperature\":null},\"output_types\":[\"BaseLanguageModel\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AzureChatOpenAISpecs\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"api_version\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"2023-12-01-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"2023-03-15-preview\",\"2023-05-15\",\"2023-06-01-preview\",\"2023-07-01-preview\",\"2023-08-01-preview\",\"2023-09-01-preview\",\"2023-12-01-preview\"],\"name\":\"api_version\",\"display_name\":\"API Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"azure_deployment\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"azure_deployment\",\"display_name\":\"Deployment Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"azure_endpoint\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"azure_endpoint\",\"display_name\":\"Azure Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Your Azure endpoint, including the resource.. 
Example: `https://example-resource.azure.openai.com/`\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.llms.base import BaseLanguageModel\\nfrom langchain_community.chat_models.azure_openai import AzureChatOpenAI\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AzureChatOpenAISpecsComponent(CustomComponent):\\n display_name: str = \\\"AzureChatOpenAI\\\"\\n description: str = \\\"LLM model from Azure OpenAI.\\\"\\n documentation: str = \\\"https://python.langchain.com/docs/integrations/llms/azure_openai\\\"\\n beta = False\\n\\n AZURE_OPENAI_MODELS = [\\n \\\"gpt-35-turbo\\\",\\n \\\"gpt-35-turbo-16k\\\",\\n \\\"gpt-35-turbo-instruct\\\",\\n \\\"gpt-4\\\",\\n \\\"gpt-4-32k\\\",\\n \\\"gpt-4-vision\\\",\\n ]\\n\\n AZURE_OPENAI_API_VERSIONS = [\\n \\\"2023-03-15-preview\\\",\\n \\\"2023-05-15\\\",\\n \\\"2023-06-01-preview\\\",\\n \\\"2023-07-01-preview\\\",\\n \\\"2023-08-01-preview\\\",\\n \\\"2023-09-01-preview\\\",\\n \\\"2023-12-01-preview\\\",\\n ]\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": self.AZURE_OPENAI_MODELS[0],\\n \\\"options\\\": self.AZURE_OPENAI_MODELS,\\n \\\"required\\\": True,\\n },\\n \\\"azure_endpoint\\\": {\\n \\\"display_name\\\": \\\"Azure Endpoint\\\",\\n \\\"required\\\": True,\\n \\\"info\\\": \\\"Your Azure endpoint, including the resource.. 
Example: `https://example-resource.azure.openai.com/`\\\",\\n },\\n \\\"azure_deployment\\\": {\\n \\\"display_name\\\": \\\"Deployment Name\\\",\\n \\\"required\\\": True,\\n },\\n \\\"api_version\\\": {\\n \\\"display_name\\\": \\\"API Version\\\",\\n \\\"options\\\": self.AZURE_OPENAI_API_VERSIONS,\\n \\\"value\\\": self.AZURE_OPENAI_API_VERSIONS[-1],\\n \\\"required\\\": True,\\n \\\"advanced\\\": True,\\n },\\n \\\"api_key\\\": {\\\"display_name\\\": \\\"API Key\\\", \\\"required\\\": True, \\\"password\\\": True},\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"value\\\": 0.7,\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"required\\\": False,\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"value\\\": 1000,\\n \\\"required\\\": False,\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"advanced\\\": True,\\n \\\"info\\\": \\\"Maximum number of tokens to generate.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str,\\n azure_endpoint: str,\\n azure_deployment: str,\\n api_key: str,\\n api_version: str,\\n temperature: float = 0.7,\\n max_tokens: Optional[int] = 1000,\\n ) -> BaseLanguageModel:\\n try:\\n llm = AzureChatOpenAI(\\n model=model,\\n azure_endpoint=azure_endpoint,\\n azure_deployment=azure_deployment,\\n api_version=api_version,\\n api_key=api_key,\\n temperature=temperature,\\n max_tokens=max_tokens,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to AzureOpenAI API.\\\") from e\\n return llm\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1000,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max 
Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"Maximum number of tokens to generate.\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-35-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gpt-35-turbo\",\"gpt-35-turbo-16k\",\"gpt-35-turbo-instruct\",\"gpt-4\",\"gpt-4-32k\",\"gpt-4-vision\"],\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"LLM model from Azure OpenAI.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\"],\"display_name\":\"AzureChatOpenAI\",\"documentation\":\"https://python.langchain.com/docs/integrations/llms/azure_openai\",\"custom_fields\":{\"model\":null,\"azure_endpoint\":null,\"azure_deployment\":null,\"api_key\":null,\"api_version\":null,\"temperature\":null,\"max_tokens\":null},\"output_types\":[\"BaseLanguageModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":false},\"ChatOllamaEndpointSpecs\":{\"template\":{\"metadata\":{\"type\":\"Dict[str, Any]\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"Metadata to add to the run 
trace.\",\"title_case\":false},\"stop\":{\"type\":\"list\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"stop\",\"display_name\":\"Stop Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"List of tokens to signal the model to stop generating text.\",\"title_case\":false},\"tags\":{\"type\":\"list\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"display_name\":\"Tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"Tags to add to the run trace.\",\"title_case\":false},\"base_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"base_url\",\"display_name\":\"Base URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Ollama API. 
Defaults to 'http://localhost:11434' if not specified.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache\",\"display_name\":\"Cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"Enable or disable caching.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Any, Dict, List, Optional\\n\\n# from langchain_community.chat_models import ChatOllama\\nfrom langchain_community.chat_models import ChatOllama\\nfrom langchain_core.language_models.chat_models import BaseChatModel\\n\\n# from langchain.chat_models import ChatOllama\\nfrom langflow import CustomComponent\\n\\n# whe When a callback component is added to Langflow, the comment must be uncommented.\\n# from langchain.callbacks.manager import CallbackManager\\n\\n\\nclass ChatOllamaComponent(CustomComponent):\\n display_name = \\\"ChatOllama\\\"\\n description = \\\"Local LLM for chat with Ollama.\\\"\\n\\n def build_config(self) -> dict:\\n return {\\n \\\"base_url\\\": {\\n \\\"display_name\\\": \\\"Base URL\\\",\\n \\\"info\\\": \\\"Endpoint of the Ollama API. 
Defaults to 'http://localhost:11434' if not specified.\\\",\\n },\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": \\\"llama2\\\",\\n \\\"info\\\": \\\"Refer to https://ollama.ai/library for more models.\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.8,\\n \\\"info\\\": \\\"Controls the creativity of model responses.\\\",\\n },\\n \\\"cache\\\": {\\n \\\"display_name\\\": \\\"Cache\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"info\\\": \\\"Enable or disable caching.\\\",\\n \\\"advanced\\\": True,\\n \\\"value\\\": False,\\n },\\n ### When a callback component is added to Langflow, the comment must be uncommented. ###\\n # \\\"callback_manager\\\": {\\n # \\\"display_name\\\": \\\"Callback Manager\\\",\\n # \\\"info\\\": \\\"Optional callback manager for additional functionality.\\\",\\n # \\\"advanced\\\": True,\\n # },\\n # \\\"callbacks\\\": {\\n # \\\"display_name\\\": \\\"Callbacks\\\",\\n # \\\"info\\\": \\\"Callbacks to execute during model runtime.\\\",\\n # \\\"advanced\\\": True,\\n # },\\n ########################################################################################\\n \\\"format\\\": {\\n \\\"display_name\\\": \\\"Format\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"Specify the format of the output (e.g., json).\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"metadata\\\": {\\n \\\"display_name\\\": \\\"Metadata\\\",\\n \\\"info\\\": \\\"Metadata to add to the run trace.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat\\\": {\\n \\\"display_name\\\": \\\"Mirostat\\\",\\n \\\"options\\\": [\\\"Disabled\\\", \\\"Mirostat\\\", \\\"Mirostat 2.0\\\"],\\n \\\"info\\\": \\\"Enable/disable Mirostat sampling for controlling perplexity.\\\",\\n \\\"value\\\": \\\"Disabled\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat_eta\\\": {\\n \\\"display_name\\\": \\\"Mirostat Eta\\\",\\n \\\"field_type\\\": 
\\\"float\\\",\\n \\\"info\\\": \\\"Learning rate for Mirostat algorithm. (Default: 0.1)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat_tau\\\": {\\n \\\"display_name\\\": \\\"Mirostat Tau\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Controls the balance between coherence and diversity of the output. (Default: 5.0)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_ctx\\\": {\\n \\\"display_name\\\": \\\"Context Window Size\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Size of the context window for generating tokens. (Default: 2048)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_gpu\\\": {\\n \\\"display_name\\\": \\\"Number of GPUs\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Number of GPUs to use for computation. (Default: 1 on macOS, 0 to disable)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_thread\\\": {\\n \\\"display_name\\\": \\\"Number of Threads\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Number of threads to use during computation. (Default: detected for optimal performance)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"repeat_last_n\\\": {\\n \\\"display_name\\\": \\\"Repeat Last N\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"repeat_penalty\\\": {\\n \\\"display_name\\\": \\\"Repeat Penalty\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Penalty for repetitions in generated text. (Default: 1.1)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"tfs_z\\\": {\\n \\\"display_name\\\": \\\"TFS Z\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Tail free sampling value. 
(Default: 1)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"timeout\\\": {\\n \\\"display_name\\\": \\\"Timeout\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Timeout for the request stream.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Limits token selection to top K. (Default: 40)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Works together with top-k. (Default: 0.9)\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"verbose\\\": {\\n \\\"display_name\\\": \\\"Verbose\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"info\\\": \\\"Whether to print out response text.\\\",\\n },\\n \\\"tags\\\": {\\n \\\"display_name\\\": \\\"Tags\\\",\\n \\\"field_type\\\": \\\"list\\\",\\n \\\"info\\\": \\\"Tags to add to the run trace.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"stop\\\": {\\n \\\"display_name\\\": \\\"Stop Tokens\\\",\\n \\\"field_type\\\": \\\"list\\\",\\n \\\"info\\\": \\\"List of tokens to signal the model to stop generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"system\\\": {\\n \\\"display_name\\\": \\\"System\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"System to use for generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"template\\\": {\\n \\\"display_name\\\": \\\"Template\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"Template to use for generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n base_url: Optional[str],\\n model: str,\\n mirostat: Optional[str],\\n mirostat_eta: Optional[float] = None,\\n mirostat_tau: Optional[float] = None,\\n ### When a callback component is added to Langflow, the comment must be uncommented.###\\n # callback_manager: Optional[CallbackManager] = None,\\n # callbacks: Optional[List[Callbacks]] = None,\\n 
#######################################################################################\\n repeat_last_n: Optional[int] = None,\\n verbose: Optional[bool] = None,\\n cache: Optional[bool] = None,\\n num_ctx: Optional[int] = None,\\n num_gpu: Optional[int] = None,\\n format: Optional[str] = None,\\n metadata: Optional[Dict[str, Any]] = None,\\n num_thread: Optional[int] = None,\\n repeat_penalty: Optional[float] = None,\\n stop: Optional[List[str]] = None,\\n system: Optional[str] = None,\\n tags: Optional[List[str]] = None,\\n temperature: Optional[float] = None,\\n template: Optional[str] = None,\\n tfs_z: Optional[float] = None,\\n timeout: Optional[int] = None,\\n top_k: Optional[int] = None,\\n top_p: Optional[int] = None,\\n ) -> BaseChatModel:\\n if not base_url:\\n base_url = \\\"http://localhost:11434\\\"\\n\\n # Mapping mirostat settings to their corresponding values\\n mirostat_options = {\\\"Mirostat\\\": 1, \\\"Mirostat 2.0\\\": 2}\\n\\n # Default to 0 for 'Disabled'\\n mirostat_value = mirostat_options.get(mirostat, 0) # type: ignore\\n\\n # Set mirostat_eta and mirostat_tau to None if mirostat is disabled\\n if mirostat_value == 0:\\n mirostat_eta = None\\n mirostat_tau = None\\n\\n # Mapping system settings to their corresponding values\\n llm_params = {\\n \\\"base_url\\\": base_url,\\n \\\"cache\\\": cache,\\n \\\"model\\\": model,\\n \\\"mirostat\\\": mirostat_value,\\n \\\"format\\\": format,\\n \\\"metadata\\\": metadata,\\n \\\"tags\\\": tags,\\n ## When a callback component is added to Langflow, the comment must be uncommented.##\\n # \\\"callback_manager\\\": callback_manager,\\n # \\\"callbacks\\\": callbacks,\\n #####################################################################################\\n \\\"mirostat_eta\\\": mirostat_eta,\\n \\\"mirostat_tau\\\": mirostat_tau,\\n \\\"num_ctx\\\": num_ctx,\\n \\\"num_gpu\\\": num_gpu,\\n \\\"num_thread\\\": num_thread,\\n \\\"repeat_last_n\\\": repeat_last_n,\\n \\\"repeat_penalty\\\": 
repeat_penalty,\\n \\\"temperature\\\": temperature,\\n \\\"stop\\\": stop,\\n \\\"system\\\": system,\\n \\\"template\\\": template,\\n \\\"tfs_z\\\": tfs_z,\\n \\\"timeout\\\": timeout,\\n \\\"top_k\\\": top_k,\\n \\\"top_p\\\": top_p,\\n \\\"verbose\\\": verbose,\\n }\\n\\n # None Value remove\\n llm_params = {k: v for k, v in llm_params.items() if v is not None}\\n\\n try:\\n output = ChatOllama(**llm_params) # type: ignore\\n except Exception as e:\\n raise ValueError(\\\"Could not initialize Ollama LLM.\\\") from e\\n\\n return output # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"format\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"format\",\"display_name\":\"Format\",\"advanced\":true,\"dynamic\":false,\"info\":\"Specify the format of the output (e.g., json).\",\"title_case\":false,\"input_types\":[\"Text\"]},\"mirostat\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Disabled\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Disabled\",\"Mirostat\",\"Mirostat 2.0\"],\"name\":\"mirostat\",\"display_name\":\"Mirostat\",\"advanced\":true,\"dynamic\":false,\"info\":\"Enable/disable Mirostat sampling for controlling perplexity.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"mirostat_eta\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mirostat_eta\",\"display_name\":\"Mirostat Eta\",\"advanced\":true,\"dynamic\":false,\"info\":\"Learning rate for Mirostat algorithm. 
(Default: 0.1)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"mirostat_tau\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mirostat_tau\",\"display_name\":\"Mirostat Tau\",\"advanced\":true,\"dynamic\":false,\"info\":\"Controls the balance between coherence and diversity of the output. (Default: 5.0)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"llama2\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"Refer to https://ollama.ai/library for more models.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"num_ctx\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_ctx\",\"display_name\":\"Context Window Size\",\"advanced\":true,\"dynamic\":false,\"info\":\"Size of the context window for generating tokens. (Default: 2048)\",\"title_case\":false},\"num_gpu\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_gpu\",\"display_name\":\"Number of GPUs\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of GPUs to use for computation. 
(Default: 1 on macOS, 0 to disable)\",\"title_case\":false},\"num_thread\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_thread\",\"display_name\":\"Number of Threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of threads to use during computation. (Default: detected for optimal performance)\",\"title_case\":false},\"repeat_last_n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_last_n\",\"display_name\":\"Repeat Last N\",\"advanced\":true,\"dynamic\":false,\"info\":\"How far back the model looks to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx)\",\"title_case\":false},\"repeat_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_penalty\",\"display_name\":\"Repeat Penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"Penalty for repetitions in generated text. 
(Default: 1.1)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"system\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"system\",\"display_name\":\"System\",\"advanced\":true,\"dynamic\":false,\"info\":\"System to use for generating text.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Controls the creativity of model responses.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"template\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"template\",\"display_name\":\"Template\",\"advanced\":true,\"dynamic\":false,\"info\":\"Template to use for generating text.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"tfs_z\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tfs_z\",\"display_name\":\"TFS Z\",\"advanced\":true,\"dynamic\":false,\"info\":\"Tail free sampling value. 
(Default: 1)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"timeout\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"Timeout for the request stream.\",\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"Limits token selection to top K. (Default: 40)\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"Works together with top-k. 
(Default: 0.9)\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"Whether to print out response text.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Local LLM for chat with Ollama.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseChatModel\"],\"display_name\":\"ChatOllama\",\"documentation\":\"\",\"custom_fields\":{\"base_url\":null,\"model\":null,\"mirostat\":null,\"mirostat_eta\":null,\"mirostat_tau\":null,\"repeat_last_n\":null,\"verbose\":null,\"cache\":null,\"num_ctx\":null,\"num_gpu\":null,\"format\":null,\"metadata\":null,\"num_thread\":null,\"repeat_penalty\":null,\"stop\":null,\"system\":null,\"tags\":null,\"temperature\":null,\"template\":null,\"tfs_z\":null,\"timeout\":null,\"top_k\":null,\"top_p\":null},\"output_types\":[\"BaseChatModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"BaiduQianfanChatEndpointsSpecs\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.chat_models.baidu_qianfan_endpoint import QianfanChatEndpoint\\nfrom langchain.llms.base import BaseLLM\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass QianfanChatEndpointComponent(CustomComponent):\\n display_name: str = \\\"QianfanChatEndpoint\\\"\\n description: str = (\\n \\\"Baidu Qianfan chat models. 
Get more detail from \\\"\\n \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n \\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": 
False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = QianfanChatEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=SecretStr(qianfan_ak) if qianfan_ak else None,\\n qianfan_sk=SecretStr(qianfan_sk) if qianfan_sk else None,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n return output # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model used.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"title_case\":false,\"input_types\":[\"Text\"]},\"penalty_score\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"qianfan_ak\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"title_case\":false,\"input_types\":[\"Text\"]},\"qianfan_sk\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and 
ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Baidu Qianfan chat models. Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"QianfanChatEndpoint\",\"documentation\":\"\",\"custom_fields\":{\"model\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"top_p\":null,\"temperature\":null,\"penalty_score\":null,\"endpoint\":null},\"output_types\":[\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"LlamaCppSpecs\":{\"template\":{\"metadata\":{\"type\":\"Dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"Dict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model 
Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_path\":{\"type\":\"file\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".bin\"],\"file_path\":\"\",\"password\":false,\"name\":\"model_path\",\"display_name\":\"Model Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"cache\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"cache\",\"display_name\":\"Cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"client\":{\"type\":\"Any\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"client\",\"display_name\":\"Client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, List, Dict, Any\\nfrom langflow import CustomComponent\\nfrom langchain_community.llms.llamacpp import LlamaCpp\\n\\n\\nclass LlamaCppComponent(CustomComponent):\\n display_name = \\\"LlamaCpp\\\"\\n description = \\\"llama.cpp model.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\\\"\\n\\n def build_config(self):\\n return {\\n \\\"grammar\\\": {\\\"display_name\\\": \\\"Grammar\\\", \\\"advanced\\\": True},\\n \\\"cache\\\": {\\\"display_name\\\": \\\"Cache\\\", \\\"advanced\\\": True},\\n \\\"client\\\": {\\\"display_name\\\": \\\"Client\\\", \\\"advanced\\\": True},\\n \\\"echo\\\": {\\\"display_name\\\": \\\"Echo\\\", \\\"advanced\\\": True},\\n \\\"f16_kv\\\": {\\\"display_name\\\": \\\"F16 KV\\\", \\\"advanced\\\": True},\\n \\\"grammar_path\\\": 
{\\\"display_name\\\": \\\"Grammar Path\\\", \\\"advanced\\\": True},\\n \\\"last_n_tokens_size\\\": {\\\"display_name\\\": \\\"Last N Tokens Size\\\", \\\"advanced\\\": True},\\n \\\"logits_all\\\": {\\\"display_name\\\": \\\"Logits All\\\", \\\"advanced\\\": True},\\n \\\"logprobs\\\": {\\\"display_name\\\": \\\"Logprobs\\\", \\\"advanced\\\": True},\\n \\\"lora_base\\\": {\\\"display_name\\\": \\\"Lora Base\\\", \\\"advanced\\\": True},\\n \\\"lora_path\\\": {\\\"display_name\\\": \\\"Lora Path\\\", \\\"advanced\\\": True},\\n \\\"max_tokens\\\": {\\\"display_name\\\": \\\"Max Tokens\\\", \\\"advanced\\\": True},\\n \\\"metadata\\\": {\\\"display_name\\\": \\\"Metadata\\\", \\\"advanced\\\": True},\\n \\\"model_kwargs\\\": {\\\"display_name\\\": \\\"Model Kwargs\\\", \\\"advanced\\\": True},\\n \\\"model_path\\\": {\\n \\\"display_name\\\": \\\"Model Path\\\",\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".bin\\\"],\\n \\\"required\\\": True,\\n },\\n \\\"n_batch\\\": {\\\"display_name\\\": \\\"N Batch\\\", \\\"advanced\\\": True},\\n \\\"n_ctx\\\": {\\\"display_name\\\": \\\"N Ctx\\\", \\\"advanced\\\": True},\\n \\\"n_gpu_layers\\\": {\\\"display_name\\\": \\\"N GPU Layers\\\", \\\"advanced\\\": True},\\n \\\"n_parts\\\": {\\\"display_name\\\": \\\"N Parts\\\", \\\"advanced\\\": True},\\n \\\"n_threads\\\": {\\\"display_name\\\": \\\"N Threads\\\", \\\"advanced\\\": True},\\n \\\"repeat_penalty\\\": {\\\"display_name\\\": \\\"Repeat Penalty\\\", \\\"advanced\\\": True},\\n \\\"rope_freq_base\\\": {\\\"display_name\\\": \\\"Rope Freq Base\\\", \\\"advanced\\\": True},\\n \\\"rope_freq_scale\\\": {\\\"display_name\\\": \\\"Rope Freq Scale\\\", \\\"advanced\\\": True},\\n \\\"seed\\\": {\\\"display_name\\\": \\\"Seed\\\", \\\"advanced\\\": True},\\n \\\"stop\\\": {\\\"display_name\\\": \\\"Stop\\\", \\\"advanced\\\": True},\\n \\\"streaming\\\": {\\\"display_name\\\": \\\"Streaming\\\", \\\"advanced\\\": True},\\n \\\"suffix\\\": 
{\\\"display_name\\\": \\\"Suffix\\\", \\\"advanced\\\": True},\\n \\\"tags\\\": {\\\"display_name\\\": \\\"Tags\\\", \\\"advanced\\\": True},\\n \\\"temperature\\\": {\\\"display_name\\\": \\\"Temperature\\\"},\\n \\\"top_k\\\": {\\\"display_name\\\": \\\"Top K\\\", \\\"advanced\\\": True},\\n \\\"top_p\\\": {\\\"display_name\\\": \\\"Top P\\\", \\\"advanced\\\": True},\\n \\\"use_mlock\\\": {\\\"display_name\\\": \\\"Use Mlock\\\", \\\"advanced\\\": True},\\n \\\"use_mmap\\\": {\\\"display_name\\\": \\\"Use Mmap\\\", \\\"advanced\\\": True},\\n \\\"verbose\\\": {\\\"display_name\\\": \\\"Verbose\\\", \\\"advanced\\\": True},\\n \\\"vocab_only\\\": {\\\"display_name\\\": \\\"Vocab Only\\\", \\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n model_path: str,\\n grammar: Optional[str] = None,\\n cache: Optional[bool] = None,\\n client: Optional[Any] = None,\\n echo: Optional[bool] = False,\\n f16_kv: bool = True,\\n grammar_path: Optional[str] = None,\\n last_n_tokens_size: Optional[int] = 64,\\n logits_all: bool = False,\\n logprobs: Optional[int] = None,\\n lora_base: Optional[str] = None,\\n lora_path: Optional[str] = None,\\n max_tokens: Optional[int] = 256,\\n metadata: Optional[Dict] = None,\\n model_kwargs: Dict = {},\\n n_batch: Optional[int] = 8,\\n n_ctx: int = 512,\\n n_gpu_layers: Optional[int] = 1,\\n n_parts: int = -1,\\n n_threads: Optional[int] = 1,\\n repeat_penalty: Optional[float] = 1.1,\\n rope_freq_base: float = 10000.0,\\n rope_freq_scale: float = 1.0,\\n seed: int = -1,\\n stop: Optional[List[str]] = [],\\n streaming: bool = True,\\n suffix: Optional[str] = \\\"\\\",\\n tags: Optional[List[str]] = [],\\n temperature: Optional[float] = 0.8,\\n top_k: Optional[int] = 40,\\n top_p: Optional[float] = 0.95,\\n use_mlock: bool = False,\\n use_mmap: Optional[bool] = True,\\n verbose: bool = True,\\n vocab_only: bool = False,\\n ) -> LlamaCpp:\\n return LlamaCpp(\\n model_path=model_path,\\n grammar=grammar,\\n cache=cache,\\n 
client=client,\\n echo=echo,\\n f16_kv=f16_kv,\\n grammar_path=grammar_path,\\n last_n_tokens_size=last_n_tokens_size,\\n logits_all=logits_all,\\n logprobs=logprobs,\\n lora_base=lora_base,\\n lora_path=lora_path,\\n max_tokens=max_tokens,\\n metadata=metadata,\\n model_kwargs=model_kwargs,\\n n_batch=n_batch,\\n n_ctx=n_ctx,\\n n_gpu_layers=n_gpu_layers,\\n n_parts=n_parts,\\n n_threads=n_threads,\\n repeat_penalty=repeat_penalty,\\n rope_freq_base=rope_freq_base,\\n rope_freq_scale=rope_freq_scale,\\n seed=seed,\\n stop=stop,\\n streaming=streaming,\\n suffix=suffix,\\n tags=tags,\\n temperature=temperature,\\n top_k=top_k,\\n top_p=top_p,\\n use_mlock=use_mlock,\\n use_mmap=use_mmap,\\n verbose=verbose,\\n vocab_only=vocab_only,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"echo\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"echo\",\"display_name\":\"Echo\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"f16_kv\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"f16_kv\",\"display_name\":\"F16 
KV\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"grammar\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"grammar\",\"display_name\":\"Grammar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"grammar_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"grammar_path\",\"display_name\":\"Grammar Path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"last_n_tokens_size\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":64,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"last_n_tokens_size\",\"display_name\":\"Last N Tokens Size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"logits_all\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"logits_all\",\"display_name\":\"Logits All\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"logprobs\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"logprobs\",\"display_name\":\"Logprobs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"lora_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"lora_base\",\"display_name\":\"Lora 
Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"lora_path\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"lora_path\",\"display_name\":\"Lora Path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_batch\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_batch\",\"display_name\":\"N Batch\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_ctx\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":512,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_ctx\",\"display_name\":\"N Ctx\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_gpu_layers\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_gpu_layers\",\"display_name\":\"N GPU Layers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_parts\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":-1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_parts\",\"display_name\":\"N 
Parts\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n_threads\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_threads\",\"display_name\":\"N Threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"repeat_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_penalty\",\"display_name\":\"Repeat Penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"rope_freq_base\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":10000.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"rope_freq_base\",\"display_name\":\"Rope Freq Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"rope_freq_scale\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"rope_freq_scale\",\"display_name\":\"Rope Freq 
Scale\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"seed\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":-1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"seed\",\"display_name\":\"Seed\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"stop\",\"display_name\":\"Stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"streaming\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"suffix\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"suffix\",\"display_name\":\"Suffix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"tags\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":[],\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tags\",\"display_name\":\"Tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\
":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":40,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"use_mlock\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_mlock\",\"display_name\":\"Use Mlock\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"use_mmap\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"use_mmap\",\"display_name\":\"Use Mmap\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"vocab_only\":{\"type\":\"bool\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"vocab_only\",\"display_name\":\"Vocab 
Only\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"llama.cpp model.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"LlamaCpp\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\",\"LLM\"],\"display_name\":\"LlamaCpp\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\",\"custom_fields\":{\"model_path\":null,\"grammar\":null,\"cache\":null,\"client\":null,\"echo\":null,\"f16_kv\":null,\"grammar_path\":null,\"last_n_tokens_size\":null,\"logits_all\":null,\"logprobs\":null,\"lora_base\":null,\"lora_path\":null,\"max_tokens\":null,\"metadata\":null,\"model_kwargs\":null,\"n_batch\":null,\"n_ctx\":null,\"n_gpu_layers\":null,\"n_parts\":null,\"n_threads\":null,\"repeat_penalty\":null,\"rope_freq_base\":null,\"rope_freq_scale\":null,\"seed\":null,\"stop\":null,\"streaming\":null,\"suffix\":null,\"tags\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"use_mlock\":null,\"use_mmap\":null,\"verbose\":null,\"vocab_only\":null},\"output_types\":[\"LlamaCpp\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AnthropicSpecs\":{\"template\":{\"anthropic_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"anthropic_api_key\",\"display_name\":\"Anthropic API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"anthropic_api_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"anthropic_api_url\",\"display_name\":\"Anthropic API 
URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.llms.anthropic import Anthropic\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, NestedDict\\n\\n\\nclass AnthropicComponent(CustomComponent):\\n display_name = \\\"Anthropic\\\"\\n description = \\\"Anthropic large language models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"anthropic_api_key\\\": {\\n \\\"display_name\\\": \\\"Anthropic API Key\\\",\\n \\\"type\\\": str,\\n \\\"password\\\": True,\\n },\\n \\\"anthropic_api_url\\\": {\\n \\\"display_name\\\": \\\"Anthropic API URL\\\",\\n \\\"type\\\": str,\\n },\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Kwargs\\\",\\n \\\"field_type\\\": \\\"NestedDict\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n },\\n }\\n\\n def build(\\n self,\\n anthropic_api_key: str,\\n anthropic_api_url: str,\\n model_kwargs: NestedDict = {},\\n temperature: Optional[float] = None,\\n ) -> BaseLanguageModel:\\n return Anthropic(\\n anthropic_api_key=SecretStr(anthropic_api_key),\\n anthropic_api_url=anthropic_api_url,\\n model_kwargs=model_kwargs,\\n temperature=temperature,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"NestedDict\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model 
Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Anthropic large language models.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\"],\"display_name\":\"Anthropic\",\"documentation\":\"\",\"custom_fields\":{\"anthropic_api_key\":null,\"anthropic_api_url\":null,\"model_kwargs\":null,\"temperature\":null},\"output_types\":[\"BaseLanguageModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"AnthropicLLMSpecs\":{\"template\":{\"anthropic_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"anthropic_api_key\",\"display_name\":\"Anthropic API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"Your Anthropic API key.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"api_endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"api_endpoint\",\"display_name\":\"API Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_community.chat_models.anthropic import ChatAnthropic\\nfrom langchain.llms.base import BaseLanguageModel\\nfrom pydantic.v1 import SecretStr\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass AnthropicLLM(CustomComponent):\\n display_name: str = \\\"AnthropicLLM\\\"\\n description: str = \\\"Anthropic Chat&Completion large language models.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"claude-2.1\\\",\\n \\\"claude-2.0\\\",\\n \\\"claude-instant-1.2\\\",\\n \\\"claude-instant-1\\\",\\n # Add more models as needed\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/anthropic\\\",\\n \\\"required\\\": True,\\n \\\"value\\\": \\\"claude-2.1\\\",\\n },\\n \\\"anthropic_api_key\\\": {\\n \\\"display_name\\\": \\\"Anthropic API Key\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"Your Anthropic API key.\\\",\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"value\\\": 256,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.7,\\n },\\n \\\"api_endpoint\\\": {\\n \\\"display_name\\\": \\\"API Endpoint\\\",\\n \\\"info\\\": \\\"Endpoint of the Anthropic API. 
Defaults to 'https://api.anthropic.com' if not specified.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str,\\n anthropic_api_key: Optional[str] = None,\\n max_tokens: Optional[int] = None,\\n temperature: Optional[float] = None,\\n api_endpoint: Optional[str] = None,\\n ) -> BaseLanguageModel:\\n # Set default API endpoint if not provided\\n if not api_endpoint:\\n api_endpoint = \\\"https://api.anthropic.com\\\"\\n\\n try:\\n output = ChatAnthropic(\\n model_name=model,\\n anthropic_api_key=SecretStr(anthropic_api_key) if anthropic_api_key else None,\\n max_tokens_to_sample=max_tokens, # type: ignore\\n temperature=temperature,\\n anthropic_api_url=api_endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Anthropic API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"claude-2.1\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"claude-2.1\",\"claude-2.0\",\"claude-instant-1.2\",\"claude-instant-1\"],\"name\":\"model\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/anthropic\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Anthropic Chat&Completion large language models.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\"],\"display_name\":\"AnthropicLLM\",\"documentation\":\"\",\"custom_fields\":{\"model\":null,\"anthropic_api_key\":null,\"max_tokens\":null,\"temperature\":null,\"api_endpoint\":null},\"output_types\":[\"BaseLanguageModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"CohereSpecs\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_community.llms.cohere import Cohere\\nfrom langchain_core.language_models.base import BaseLanguageModel\\nfrom langflow import CustomComponent\\n\\n\\nclass CohereComponent(CustomComponent):\\n display_name = \\\"Cohere\\\"\\n description = \\\"Cohere large language models.\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\\\"\\n\\n def build_config(self):\\n return {\\n \\\"cohere_api_key\\\": {\\\"display_name\\\": \\\"Cohere API Key\\\", \\\"type\\\": \\\"password\\\", \\\"password\\\": True},\\n \\\"max_tokens\\\": {\\\"display_name\\\": \\\"Max Tokens\\\", \\\"default\\\": 256, \\\"type\\\": \\\"int\\\", \\\"show\\\": True},\\n \\\"temperature\\\": {\\\"display_name\\\": \\\"Temperature\\\", 
\\\"default\\\": 0.75, \\\"type\\\": \\\"float\\\", \\\"show\\\": True},\\n }\\n\\n def build(\\n self,\\n cohere_api_key: str,\\n max_tokens: int = 256,\\n temperature: float = 0.75,\\n ) -> BaseLanguageModel:\\n return Cohere(cohere_api_key=cohere_api_key, max_tokens=max_tokens, temperature=temperature) # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"cohere_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"cohere_api_key\",\"display_name\":\"Cohere API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_tokens\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.75,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Cohere large language 
models.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\"],\"display_name\":\"Cohere\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\",\"custom_fields\":{\"cohere_api_key\":null,\"max_tokens\":null,\"temperature\":null},\"output_types\":[\"BaseLanguageModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"GoogleGenerativeAISpecs\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain_google_genai import ChatGoogleGenerativeAI # type: ignore\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, RangeSpec\\nfrom pydantic.v1.types import SecretStr\\n\\n\\nclass GoogleGenerativeAIComponent(CustomComponent):\\n display_name: str = \\\"Google Generative AI\\\"\\n description: str = \\\"A component that uses Google Generative AI to generate text.\\\"\\n documentation: str = \\\"http://docs.langflow.org/components/custom\\\"\\n\\n def build_config(self):\\n return {\\n \\\"google_api_key\\\": {\\n \\\"display_name\\\": \\\"Google API Key\\\",\\n \\\"info\\\": \\\"The Google API Key to use for the Google Generative AI.\\\",\\n },\\n \\\"max_output_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Output Tokens\\\",\\n \\\"info\\\": \\\"The maximum number of tokens to generate.\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"info\\\": \\\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\\\",\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"info\\\": \\\"Decode using top-k sampling: consider the set of top_k most probable tokens. 
Must be positive.\\\",\\n \\\"range_spec\\\": RangeSpec(min=0, max=2, step=0.1),\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"info\\\": \\\"The maximum cumulative probability of tokens to consider when sampling.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"n\\\": {\\n \\\"display_name\\\": \\\"N\\\",\\n \\\"info\\\": \\\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model\\\",\\n \\\"info\\\": \\\"The name of the model to use. Supported examples: gemini-pro\\\",\\n \\\"options\\\": [\\\"gemini-pro\\\", \\\"gemini-pro-vision\\\"],\\n },\\n \\\"code\\\": {\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n google_api_key: str,\\n model: str,\\n max_output_tokens: Optional[int] = None,\\n temperature: float = 0.1,\\n top_k: Optional[int] = None,\\n top_p: Optional[float] = None,\\n n: Optional[int] = 1,\\n ) -> BaseLanguageModel:\\n return ChatGoogleGenerativeAI(\\n model=model,\\n max_output_tokens=max_output_tokens or None, # type: ignore\\n temperature=temperature,\\n top_k=top_k or None,\\n top_p=top_p or None, # type: ignore\\n n=n or 1,\\n google_api_key=SecretStr(google_api_key),\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"google_api_key\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"google_api_key\",\"display_name\":\"Google API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The Google API Key to use for the Google Generative 
AI.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"max_output_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_output_tokens\",\"display_name\":\"Max Output Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum number of tokens to generate.\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gemini-pro\",\"gemini-pro-vision\"],\"name\":\"model\",\"display_name\":\"Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"The name of the model to use. Supported examples: gemini-pro\",\"title_case\":false,\"input_types\":[\"Text\"]},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n\",\"display_name\":\"N\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Run inference with this temperature. 
Must by in the closed interval [0.0, 1.0].\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"Decode using top-k sampling: consider the set of top_k most probable tokens. Must be positive.\",\"rangeSpec\":{\"min\":0.0,\"max\":2.0,\"step\":0.1},\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"The maximum cumulative probability of tokens to consider when sampling.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"A component that uses Google Generative AI to generate text.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\"],\"display_name\":\"Google Generative AI\",\"documentation\":\"http://docs.langflow.org/components/custom\",\"custom_fields\":{\"google_api_key\":null,\"model\":null,\"max_output_tokens\":null,\"temperature\":null,\"top_k\":null,\"top_p\":null,\"n\":null},\"output_types\":[\"BaseLanguageModel\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"BaiduQianfanLLMEndpointsSpecs\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\nfrom langflow import CustomComponent\\nfrom langchain.llms.baidu_qianfan_endpoint import QianfanLLMEndpoint\\nfrom langchain.llms.base import BaseLLM\\n\\n\\nclass QianfanLLMEndpointComponent(CustomComponent):\\n display_name: str = 
\\\"QianfanLLMEndpoint\\\"\\n description: str = (\\n \\\"Baidu Qianfan hosted open source or customized models. \\\"\\n \\\"Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\"\\n )\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": [\\n \\\"ERNIE-Bot\\\",\\n \\\"ERNIE-Bot-turbo\\\",\\n \\\"BLOOMZ-7B\\\",\\n \\\"Llama-2-7b-chat\\\",\\n \\\"Llama-2-13b-chat\\\",\\n \\\"Llama-2-70b-chat\\\",\\n \\\"Qianfan-BLOOMZ-7B-compressed\\\",\\n \\\"Qianfan-Chinese-Llama-2-7B\\\",\\n \\\"ChatGLM2-6B-32K\\\",\\n \\\"AquilaChat-7B\\\",\\n ],\\n \\\"info\\\": \\\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\\\",\\n \\\"required\\\": True,\\n },\\n \\\"qianfan_ak\\\": {\\n \\\"display_name\\\": \\\"Qianfan Ak\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"qianfan_sk\\\": {\\n \\\"display_name\\\": \\\"Qianfan Sk\\\",\\n \\\"required\\\": True,\\n \\\"password\\\": True,\\n \\\"info\\\": \\\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\\\",\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.8,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 0.95,\\n },\\n \\\"penalty_score\\\": {\\n \\\"display_name\\\": \\\"Penalty Score\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\\\",\\n \\\"value\\\": 1.0,\\n },\\n \\\"endpoint\\\": {\\n \\\"display_name\\\": \\\"Endpoint\\\",\\n 
\\\"info\\\": \\\"Endpoint of the Qianfan LLM, required if custom model used.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model: str = \\\"ERNIE-Bot-turbo\\\",\\n qianfan_ak: Optional[str] = None,\\n qianfan_sk: Optional[str] = None,\\n top_p: Optional[float] = None,\\n temperature: Optional[float] = None,\\n penalty_score: Optional[float] = None,\\n endpoint: Optional[str] = None,\\n ) -> BaseLLM:\\n try:\\n output = QianfanLLMEndpoint( # type: ignore\\n model=model,\\n qianfan_ak=qianfan_ak,\\n qianfan_sk=qianfan_sk,\\n top_p=top_p,\\n temperature=temperature,\\n penalty_score=penalty_score,\\n endpoint=endpoint,\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Baidu Qianfan API.\\\") from e\\n return output # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"endpoint\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"endpoint\",\"display_name\":\"Endpoint\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Qianfan LLM, required if custom model used.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"ERNIE-Bot-turbo\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"ERNIE-Bot\",\"ERNIE-Bot-turbo\",\"BLOOMZ-7B\",\"Llama-2-7b-chat\",\"Llama-2-13b-chat\",\"Llama-2-70b-chat\",\"Qianfan-BLOOMZ-7B-compressed\",\"Qianfan-Chinese-Llama-2-7B\",\"ChatGLM2-6B-32K\",\"AquilaChat-7B\"],\"name\":\"model\",\"display_name\":\"Model 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"title_case\":false,\"input_types\":[\"Text\"]},\"penalty_score\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1.0,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"penalty_score\",\"display_name\":\"Penalty Score\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"qianfan_ak\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_ak\",\"display_name\":\"Qianfan Ak\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"title_case\":false,\"input_types\":[\"Text\"]},\"qianfan_sk\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"qianfan_sk\",\"display_name\":\"Qianfan Sk\",\"advanced\":false,\"dynamic\":false,\"info\":\"which you could get from https://cloud.baidu.com/product/wenxinworkshop\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.95,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and 
ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":false,\"dynamic\":false,\"info\":\"Model params, only supported in ERNIE-Bot and ERNIE-Bot-turbo\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Baidu Qianfan hosted open source or customized models. Get more detail from https://python.langchain.com/docs/integrations/chat/baidu_qianfan_endpoint\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"QianfanLLMEndpoint\",\"documentation\":\"\",\"custom_fields\":{\"model\":null,\"qianfan_ak\":null,\"qianfan_sk\":null,\"top_p\":null,\"temperature\":null,\"penalty_score\":null,\"endpoint\":null},\"output_types\":[\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ChatLiteLLMSpecs\":{\"template\":{\"api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"api_key\",\"display_name\":\"API key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Any, Callable, Dict, Optional, Union\\n\\nfrom langchain_community.chat_models.litellm import ChatLiteLLM, ChatLiteLLMException\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel\\n\\n\\nclass ChatLiteLLMComponent(CustomComponent):\\n display_name = \\\"ChatLiteLLM\\\"\\n description = \\\"`LiteLLM` 
collection of large language models.\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/chat/litellm\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model name\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": True,\\n \\\"info\\\": \\\"The name of the model to use. For example, `gpt-3.5-turbo`.\\\",\\n },\\n \\\"api_key\\\": {\\n \\\"display_name\\\": \\\"API key\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"password\\\": True,\\n },\\n \\\"provider\\\": {\\n \\\"display_name\\\": \\\"Provider\\\",\\n \\\"info\\\": \\\"The provider of the API key.\\\",\\n \\\"options\\\": [\\n \\\"OpenAI\\\",\\n \\\"Azure\\\",\\n \\\"Anthropic\\\",\\n \\\"Replicate\\\",\\n \\\"Cohere\\\",\\n \\\"OpenRouter\\\",\\n ],\\n },\\n \\\"streaming\\\": {\\n \\\"display_name\\\": \\\"Streaming\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n \\\"default\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"default\\\": 0.7,\\n },\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model kwargs\\\",\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n \\\"default\\\": {},\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top p\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top k\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n },\\n \\\"n\\\": {\\n \\\"display_name\\\": \\\"N\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n \\\"info\\\": \\\"Number of chat completions to generate for each 
prompt. \\\"\\n \\\"Note that the API may not return the full n completions if duplicates are generated.\\\",\\n \\\"default\\\": 1,\\n },\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max tokens\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"default\\\": 256,\\n \\\"info\\\": \\\"The maximum number of tokens to generate for each chat completion.\\\",\\n },\\n \\\"max_retries\\\": {\\n \\\"display_name\\\": \\\"Max retries\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n \\\"default\\\": 6,\\n },\\n \\\"verbose\\\": {\\n \\\"display_name\\\": \\\"Verbose\\\",\\n \\\"field_type\\\": \\\"bool\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n \\\"default\\\": False,\\n },\\n }\\n\\n def build(\\n self,\\n model: str,\\n provider: str,\\n api_key: Optional[str] = None,\\n streaming: bool = True,\\n temperature: Optional[float] = 0.7,\\n model_kwargs: Optional[Dict[str, Any]] = {},\\n top_p: Optional[float] = None,\\n top_k: Optional[int] = None,\\n n: int = 1,\\n max_tokens: int = 256,\\n max_retries: int = 6,\\n verbose: bool = False,\\n ) -> Union[BaseLanguageModel, Callable]:\\n try:\\n import litellm # type: ignore\\n\\n litellm.drop_params = True\\n litellm.set_verbose = verbose\\n except ImportError:\\n raise ChatLiteLLMException(\\n \\\"Could not import litellm python package. 
\\\" \\\"Please install it with `pip install litellm`\\\"\\n )\\n provider_map = {\\n \\\"OpenAI\\\": \\\"openai_api_key\\\",\\n \\\"Azure\\\": \\\"azure_api_key\\\",\\n \\\"Anthropic\\\": \\\"anthropic_api_key\\\",\\n \\\"Replicate\\\": \\\"replicate_api_key\\\",\\n \\\"Cohere\\\": \\\"cohere_api_key\\\",\\n \\\"OpenRouter\\\": \\\"openrouter_api_key\\\",\\n }\\n # Set the API key based on the provider\\n kwarg = {provider_map[provider]: api_key}\\n\\n LLM = ChatLiteLLM(\\n model=model,\\n client=None,\\n streaming=streaming,\\n temperature=temperature,\\n model_kwargs=model_kwargs if model_kwargs is not None else {},\\n top_p=top_p,\\n top_k=top_k,\\n n=n,\\n max_tokens=max_tokens,\\n max_retries=max_retries,\\n **kwarg,\\n )\\n return LLM\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_retries\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":6,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_retries\",\"display_name\":\"Max retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"The maximum number of tokens to generate for each chat completion.\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model name\",\"advanced\":false,\"dynamic\":false,\"info\":\"The name of the model to use. 
For example, `gpt-3.5-turbo`.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_kwargs\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":1,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n\",\"display_name\":\"N\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of chat completions to generate for each prompt. Note that the API may not return the full n completions if duplicates are generated.\",\"title_case\":false},\"provider\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"OpenAI\",\"Azure\",\"Anthropic\",\"Replicate\",\"Cohere\",\"OpenRouter\"],\"name\":\"provider\",\"display_name\":\"Provider\",\"advanced\":false,\"dynamic\":false,\"info\":\"The provider of the API 
key.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"streaming\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"streaming\",\"display_name\":\"Streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"top_p\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"verbose\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"verbose\",\"display_name\":\"Verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"`LiteLLM` collection of large language 
models.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"Callable\"],\"display_name\":\"ChatLiteLLM\",\"documentation\":\"https://python.langchain.com/docs/integrations/chat/litellm\",\"custom_fields\":{\"model\":null,\"provider\":null,\"api_key\":null,\"streaming\":null,\"temperature\":null,\"model_kwargs\":null,\"top_p\":null,\"top_k\":null,\"n\":null,\"max_tokens\":null,\"max_retries\":null,\"verbose\":null},\"output_types\":[\"BaseLanguageModel\",\"Callable\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"CTransformersSpecs\":{\"template\":{\"model_file\":{\"type\":\"file\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[\".bin\"],\"file_path\":\"\",\"password\":false,\"name\":\"model_file\",\"display_name\":\"Model File\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Dict, Optional\\n\\nfrom langchain_community.llms.ctransformers import CTransformers\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass CTransformersComponent(CustomComponent):\\n display_name = \\\"CTransformers\\\"\\n description = \\\"C Transformers LLM models\\\"\\n documentation = \\\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\\\"\\n\\n def build_config(self):\\n return {\\n \\\"model\\\": {\\\"display_name\\\": \\\"Model\\\", \\\"required\\\": True},\\n \\\"model_file\\\": {\\n \\\"display_name\\\": \\\"Model File\\\",\\n \\\"required\\\": False,\\n \\\"field_type\\\": \\\"file\\\",\\n \\\"file_types\\\": [\\\".bin\\\"],\\n },\\n \\\"model_type\\\": {\\\"display_name\\\": \\\"Model Type\\\", \\\"required\\\": True},\\n \\\"config\\\": {\\n \\\"display_name\\\": \\\"Config\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": 
False,\\n \\\"field_type\\\": \\\"dict\\\",\\n \\\"value\\\": '{\\\"top_k\\\":40,\\\"top_p\\\":0.95,\\\"temperature\\\":0.8,\\\"repetition_penalty\\\":1.1,\\\"last_n_tokens\\\":64,\\\"seed\\\":-1,\\\"max_new_tokens\\\":256,\\\"stop\\\":\\\"\\\",\\\"stream\\\":\\\"False\\\",\\\"reset\\\":\\\"True\\\",\\\"batch_size\\\":8,\\\"threads\\\":-1,\\\"context_length\\\":-1,\\\"gpu_layers\\\":0}',\\n },\\n }\\n\\n def build(self, model: str, model_file: str, model_type: str, config: Optional[Dict] = None) -> CTransformers:\\n return CTransformers(model=model, model_file=model_file, model_type=model_type, config=config) # type: ignore\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"config\":{\"type\":\"dict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"{\\\"top_k\\\":40,\\\"top_p\\\":0.95,\\\"temperature\\\":0.8,\\\"repetition_penalty\\\":1.1,\\\"last_n_tokens\\\":64,\\\"seed\\\":-1,\\\"max_new_tokens\\\":256,\\\"stop\\\":\\\"\\\",\\\"stream\\\":\\\"False\\\",\\\"reset\\\":\\\"True\\\",\\\"batch_size\\\":8,\\\"threads\\\":-1,\\\"context_length\\\":-1,\\\"gpu_layers\\\":0}\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"config\",\"display_name\":\"Config\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_type\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_type\",\"display_name\":\"Model 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"C Transformers LLM models\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"CTransformers\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\",\"LLM\"],\"display_name\":\"CTransformers\",\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\",\"custom_fields\":{\"model\":null,\"model_file\":null,\"model_type\":null,\"config\":null},\"output_types\":[\"CTransformers\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"HuggingFaceEndpointsSpecs\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.llms.huggingface_endpoint import HuggingFaceEndpoint\\nfrom langflow import CustomComponent\\n\\n\\nclass HuggingFaceEndpointsComponent(CustomComponent):\\n display_name: str = \\\"Hugging Face Inference API\\\"\\n description: str = \\\"LLM model from Hugging Face Inference API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"endpoint_url\\\": {\\\"display_name\\\": \\\"Endpoint URL\\\", \\\"password\\\": True},\\n \\\"task\\\": {\\n \\\"display_name\\\": \\\"Task\\\",\\n \\\"options\\\": [\\\"text2text-generation\\\", \\\"text-generation\\\", \\\"summarization\\\"],\\n },\\n \\\"huggingfacehub_api_token\\\": {\\\"display_name\\\": \\\"API token\\\", \\\"password\\\": True},\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Keyword Arguments\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n endpoint_url: str,\\n task: str = \\\"text2text-generation\\\",\\n huggingfacehub_api_token: Optional[str] = None,\\n model_kwargs: Optional[dict] = None,\\n ) -> 
BaseLLM:\\n try:\\n output = HuggingFaceEndpoint( # type: ignore\\n endpoint_url=endpoint_url,\\n task=task,\\n huggingfacehub_api_token=huggingfacehub_api_token,\\n model_kwargs=model_kwargs or {},\\n )\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to HuggingFace Endpoints API.\\\") from e\\n return output\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"endpoint_url\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"endpoint_url\",\"display_name\":\"Endpoint URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"huggingfacehub_api_token\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"API token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"model_kwargs\":{\"type\":\"code\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model Keyword Arguments\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"task\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"text2text-generation\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"text2text-generation\",\"text-generation\",\"summarization\"],\"name\":\"task\",\"display_name\":\"Task\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"LLM model from Hugging 
Face Inference API.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"Hugging Face Inference API\",\"documentation\":\"\",\"custom_fields\":{\"endpoint_url\":null,\"task\":null,\"huggingfacehub_api_token\":null,\"model_kwargs\":null},\"output_types\":[\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ChatOpenAISpecs\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nfrom langchain.llms import BaseLLM\\nfrom langchain_community.chat_models.openai import ChatOpenAI\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import BaseLanguageModel, NestedDict\\n\\n\\nclass ChatOpenAIComponent(CustomComponent):\\n display_name = \\\"ChatOpenAI\\\"\\n description = \\\"`OpenAI` Chat large language models API.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"max_tokens\\\": {\\n \\\"display_name\\\": \\\"Max Tokens\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n },\\n \\\"model_kwargs\\\": {\\n \\\"display_name\\\": \\\"Model Kwargs\\\",\\n \\\"advanced\\\": True,\\n \\\"required\\\": False,\\n },\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"options\\\": [\\n \\\"gpt-4-turbo-preview\\\",\\n \\\"gpt-4-0125-preview\\\",\\n \\\"gpt-4-1106-preview\\\",\\n \\\"gpt-4-vision-preview\\\",\\n \\\"gpt-3.5-turbo-0125\\\",\\n \\\"gpt-3.5-turbo-1106\\\",\\n ],\\n },\\n \\\"openai_api_base\\\": {\\n \\\"display_name\\\": \\\"OpenAI API Base\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"info\\\": (\\n \\\"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\\\n\\\\n\\\"\\n \\\"You can change this to use other APIs like JinaChat, LocalAI and Prem.\\\"\\n ),\\n },\\n \\\"openai_api_key\\\": {\\n \\\"display_name\\\": \\\"OpenAI API Key\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"password\\\": True,\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"advanced\\\": False,\\n \\\"required\\\": False,\\n \\\"value\\\": 0.7,\\n },\\n }\\n\\n def build(\\n self,\\n max_tokens: Optional[int] = 256,\\n model_kwargs: NestedDict = {},\\n model_name: str = \\\"gpt-4-1106-preview\\\",\\n openai_api_base: Optional[str] = None,\\n openai_api_key: Optional[str] = None,\\n temperature: float = 0.7,\\n ) -> Union[BaseLanguageModel, BaseLLM]:\\n if not openai_api_base:\\n openai_api_base = \\\"https://api.openai.com/v1\\\"\\n return ChatOpenAI(\\n max_tokens=max_tokens,\\n model_kwargs=model_kwargs,\\n model=model_name,\\n base_url=openai_api_base,\\n api_key=openai_api_key,\\n temperature=temperature,\\n )\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"max_tokens\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":256,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"max_tokens\",\"display_name\":\"Max Tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_kwargs\":{\"type\":\"NestedDict\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":{},\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model_kwargs\",\"display_name\":\"Model 
Kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"model_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"gpt-4-1106-preview\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"gpt-4-turbo-preview\",\"gpt-4-0125-preview\",\"gpt-4-1106-preview\",\"gpt-4-vision-preview\",\"gpt-3.5-turbo-0125\",\"gpt-3.5-turbo-1106\"],\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_base\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"openai_api_key\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.7,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"documentation\":\"\",\"custom_fields\":{\"max_tokens\":null,\"model_kwargs\":null,\"model_name\":null,\"openai_api_base\":null,\"openai_api_key\":null,\"temperature\":null},\"output_types\":[\"BaseLanguageModel\",\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"OllamaLLMSpecs\":{\"template\":{\"base_url\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"base_url\",\"display_name\":\"Base URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"Endpoint of the Ollama API. Defaults to 'http://localhost:11434' if not specified.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain_community.llms.ollama import Ollama\\n\\nfrom langflow import CustomComponent\\n\\n\\nclass OllamaLLM(CustomComponent):\\n display_name = \\\"Ollama\\\"\\n description = \\\"Local LLM with Ollama.\\\"\\n\\n def build_config(self) -> dict:\\n return {\\n \\\"base_url\\\": {\\n \\\"display_name\\\": \\\"Base URL\\\",\\n \\\"info\\\": \\\"Endpoint of the Ollama API. 
Defaults to 'http://localhost:11434' if not specified.\\\",\\n },\\n \\\"model\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"value\\\": \\\"llama2\\\",\\n \\\"info\\\": \\\"Refer to https://ollama.ai/library for more models.\\\",\\n },\\n \\\"temperature\\\": {\\n \\\"display_name\\\": \\\"Temperature\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"value\\\": 0.8,\\n \\\"info\\\": \\\"Controls the creativity of model responses.\\\",\\n },\\n \\\"mirostat\\\": {\\n \\\"display_name\\\": \\\"Mirostat\\\",\\n \\\"options\\\": [\\\"Disabled\\\", \\\"Mirostat\\\", \\\"Mirostat 2.0\\\"],\\n \\\"info\\\": \\\"Enable/disable Mirostat sampling for controlling perplexity.\\\",\\n \\\"value\\\": \\\"Disabled\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat_eta\\\": {\\n \\\"display_name\\\": \\\"Mirostat Eta\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Learning rate influencing the algorithm's response to feedback.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"mirostat_tau\\\": {\\n \\\"display_name\\\": \\\"Mirostat Tau\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Controls balance between coherence and diversity.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_ctx\\\": {\\n \\\"display_name\\\": \\\"Context Window Size\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Size of the context window for generating the next token.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_gpu\\\": {\\n \\\"display_name\\\": \\\"Number of GPUs\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Number of GPUs to use for computation.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"num_thread\\\": {\\n \\\"display_name\\\": \\\"Number of Threads\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Number of threads to use during computation.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"repeat_last_n\\\": {\\n \\\"display_name\\\": \\\"Repeat Last N\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": 
\\\"Sets how far back the model looks to prevent repetition.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"repeat_penalty\\\": {\\n \\\"display_name\\\": \\\"Repeat Penalty\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Penalty for repetitions in generated text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"stop\\\": {\\n \\\"display_name\\\": \\\"Stop Tokens\\\",\\n \\\"info\\\": \\\"List of tokens to signal the model to stop generating text.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"tfs_z\\\": {\\n \\\"display_name\\\": \\\"TFS Z\\\",\\n \\\"field_type\\\": \\\"float\\\",\\n \\\"info\\\": \\\"Tail free sampling to reduce impact of less probable tokens.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"top_k\\\": {\\n \\\"display_name\\\": \\\"Top K\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Limits token selection to top K for reducing nonsense generation.\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"top_p\\\": {\\n \\\"display_name\\\": \\\"Top P\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"Works with top-k to control diversity of generated text.\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n base_url: Optional[str],\\n model: str,\\n temperature: Optional[float],\\n mirostat: Optional[str],\\n mirostat_eta: Optional[float] = None,\\n mirostat_tau: Optional[float] = None,\\n num_ctx: Optional[int] = None,\\n num_gpu: Optional[int] = None,\\n num_thread: Optional[int] = None,\\n repeat_last_n: Optional[int] = None,\\n repeat_penalty: Optional[float] = None,\\n stop: Optional[List[str]] = None,\\n tfs_z: Optional[float] = None,\\n top_k: Optional[int] = None,\\n top_p: Optional[int] = None,\\n ) -> BaseLLM:\\n if not base_url:\\n base_url = \\\"http://localhost:11434\\\"\\n\\n # Mapping mirostat settings to their corresponding values\\n mirostat_options = {\\\"Mirostat\\\": 1, \\\"Mirostat 2.0\\\": 2}\\n\\n # Default to 0 for 'Disabled'\\n mirostat_value = mirostat_options.get(mirostat, 0) # 
type: ignore\\n\\n # Set mirostat_eta and mirostat_tau to None if mirostat is disabled\\n if mirostat_value == 0:\\n mirostat_eta = None\\n mirostat_tau = None\\n\\n try:\\n llm = Ollama(\\n base_url=base_url,\\n model=model,\\n mirostat=mirostat_value,\\n mirostat_eta=mirostat_eta,\\n mirostat_tau=mirostat_tau,\\n num_ctx=num_ctx,\\n num_gpu=num_gpu,\\n num_thread=num_thread,\\n repeat_last_n=repeat_last_n,\\n repeat_penalty=repeat_penalty,\\n temperature=temperature,\\n stop=stop,\\n tfs_z=tfs_z,\\n top_k=top_k,\\n top_p=top_p,\\n )\\n\\n except Exception as e:\\n raise ValueError(\\\"Could not connect to Ollama.\\\") from e\\n\\n return llm\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"mirostat\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Disabled\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Disabled\",\"Mirostat\",\"Mirostat 2.0\"],\"name\":\"mirostat\",\"display_name\":\"Mirostat\",\"advanced\":true,\"dynamic\":false,\"info\":\"Enable/disable Mirostat sampling for controlling perplexity.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"mirostat_eta\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mirostat_eta\",\"display_name\":\"Mirostat Eta\",\"advanced\":true,\"dynamic\":false,\"info\":\"Learning rate influencing the algorithm's response to feedback.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"mirostat_tau\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"mirostat_tau\",\"display_name\":\"Mirostat Tau\",\"advanced\":true,\"dynamic\":false,\"info\":\"Controls 
balance between coherence and diversity.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"model\":{\"type\":\"str\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"llama2\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"model\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"Refer to https://ollama.ai/library for more models.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"num_ctx\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_ctx\",\"display_name\":\"Context Window Size\",\"advanced\":true,\"dynamic\":false,\"info\":\"Size of the context window for generating the next token.\",\"title_case\":false},\"num_gpu\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_gpu\",\"display_name\":\"Number of GPUs\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of GPUs to use for computation.\",\"title_case\":false},\"num_thread\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"num_thread\",\"display_name\":\"Number of Threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"Number of threads to use during computation.\",\"title_case\":false},\"repeat_last_n\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_last_n\",\"display_name\":\"Repeat Last N\",\"advanced\":true,\"dynamic\":false,\"info\":\"Sets how far back the model looks to prevent 
repetition.\",\"title_case\":false},\"repeat_penalty\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"repeat_penalty\",\"display_name\":\"Repeat Penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"Penalty for repetitions in generated text.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"stop\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"stop\",\"display_name\":\"Stop Tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"List of tokens to signal the model to stop generating text.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"temperature\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":0.8,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"temperature\",\"display_name\":\"Temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"Controls the creativity of model responses.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"tfs_z\":{\"type\":\"float\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"tfs_z\",\"display_name\":\"TFS Z\",\"advanced\":true,\"dynamic\":false,\"info\":\"Tail free sampling to reduce impact of less probable tokens.\",\"rangeSpec\":{\"min\":-1.0,\"max\":1.0,\"step\":0.1},\"title_case\":false},\"top_k\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_k\",\"display_name\":\"Top K\",\"advanced\":true,\"dynamic\":false,\"info\":\"Limits token selection to top K for reducing nonsense 
generation.\",\"title_case\":false},\"top_p\":{\"type\":\"int\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"top_p\",\"display_name\":\"Top P\",\"advanced\":true,\"dynamic\":false,\"info\":\"Works with top-k to control diversity of generated text.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Local LLM with Ollama.\",\"base_classes\":[\"BaseLanguageModel\",\"Runnable\",\"Generic\",\"RunnableSerializable\",\"Serializable\",\"object\",\"BaseLLM\"],\"display_name\":\"Ollama\",\"documentation\":\"\",\"custom_fields\":{\"base_url\":null,\"model\":null,\"temperature\":null,\"mirostat\":null,\"mirostat_eta\":null,\"mirostat_tau\":null,\"num_ctx\":null,\"num_gpu\":null,\"num_thread\":null,\"repeat_last_n\":null,\"repeat_penalty\":null,\"stop\":null,\"tfs_z\":null,\"top_k\":null,\"top_p\":null},\"output_types\":[\"BaseLLM\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"io\":{\"ChatOutput\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional, Union\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\nfrom langflow.schema import Record\\n\\n\\nclass ChatOutput(CustomComponent):\\n display_name = \\\"Chat Output\\\"\\n description = \\\"Used to send a message to the chat.\\\"\\n\\n field_config = {\\n \\\"code\\\": {\\n \\\"show\\\": True,\\n }\\n }\\n\\n def build_config(self):\\n return {\\n \\\"message\\\": {\\\"input_types\\\": [\\\"Text\\\"], \\\"display_name\\\": \\\"Message\\\"},\\n \\\"sender\\\": {\\n \\\"options\\\": [\\\"Machine\\\", \\\"User\\\"],\\n \\\"display_name\\\": \\\"Sender Type\\\",\\n },\\n \\\"sender_name\\\": {\\\"display_name\\\": \\\"Sender Name\\\"},\\n \\\"session_id\\\": {\\n \\\"display_name\\\": \\\"Session ID\\\",\\n \\\"info\\\": \\\"Session ID of 
the chat history.\\\",\\n \\\"input_types\\\": [\\\"Text\\\"],\\n },\\n \\\"return_record\\\": {\\n \\\"display_name\\\": \\\"Return Record\\\",\\n \\\"info\\\": \\\"Return the message as a record containing the sender, sender_name, and session_id.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n sender: Optional[str] = \\\"Machine\\\",\\n sender_name: Optional[str] = \\\"AI\\\",\\n session_id: Optional[str] = None,\\n message: Optional[str] = None,\\n return_record: Optional[bool] = False,\\n ) -> Union[Text, Record]:\\n if return_record:\\n if isinstance(message, Record):\\n # Update the data of the record\\n message.data[\\\"sender\\\"] = sender\\n message.data[\\\"sender_name\\\"] = sender_name\\n message.data[\\\"session_id\\\"] = session_id\\n else:\\n message = Record(\\n text=message,\\n data={\\n \\\"sender\\\": sender,\\n \\\"sender_name\\\": sender_name,\\n \\\"session_id\\\": session_id,\\n },\\n )\\n if not message:\\n message = \\\"\\\"\\n self.status = message\\n return message\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"message\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"message\",\"display_name\":\"Message\",\"advanced\":false,\"input_types\":[\"Text\",\"Text\"],\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"return_record\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_record\",\"display_name\":\"Return Record\",\"advanced\":false,\"dynamic\":false,\"info\":\"Return the message as a record containing the sender, sender_name, and 
session_id.\",\"title_case\":false},\"sender\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"Machine\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Machine\",\"User\"],\"name\":\"sender\",\"display_name\":\"Sender Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"sender_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"AI\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"sender_name\",\"display_name\":\"Sender Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"session_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"display_name\":\"Session ID\",\"advanced\":false,\"input_types\":[\"Text\",\"Text\"],\"dynamic\":false,\"info\":\"Session ID of the chat history.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Used to send a message to the chat.\",\"base_classes\":[\"Text\",\"object\",\"Record\"],\"display_name\":\"Chat Output\",\"documentation\":\"\",\"custom_fields\":{\"sender\":null,\"sender_name\":null,\"session_id\":null,\"message\":null,\"return_record\":null},\"output_types\":[\"Text\",\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"MessageHistory\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langflow import CustomComponent\\nfrom langflow.memory import get_messages\\nfrom langflow.schema import Record\\n\\n\\nclass MessageHistoryComponent(CustomComponent):\\n display_name = \\\"Message History\\\"\\n description = \\\"Used to 
retrieve stored messages.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"sender\\\": {\\n \\\"options\\\": [\\\"Machine\\\", \\\"User\\\"],\\n \\\"display_name\\\": \\\"Sender Type\\\",\\n },\\n \\\"sender_name\\\": {\\\"display_name\\\": \\\"Sender Name\\\"},\\n \\\"file_path\\\": {\\n \\\"display_name\\\": \\\"File Path\\\",\\n \\\"info\\\": \\\"Path of the local JSON file to store the messages. It should be a unique path for each chat history.\\\",\\n },\\n \\\"n_messages\\\": {\\n \\\"display_name\\\": \\\"Number of Messages\\\",\\n \\\"info\\\": \\\"Number of messages to retrieve.\\\",\\n },\\n \\\"session_id\\\": {\\n \\\"display_name\\\": \\\"Session ID\\\",\\n \\\"info\\\": \\\"Session ID of the chat history.\\\",\\n \\\"input_types\\\": [\\\"Text\\\"],\\n },\\n }\\n\\n def build(\\n self,\\n sender: Optional[str] = None,\\n sender_name: Optional[str] = None,\\n session_id: Optional[str] = None,\\n n_messages: int = 5,\\n ) -> List[Record]:\\n messages = get_messages(\\n sender=sender,\\n sender_name=sender_name,\\n session_id=session_id,\\n limit=n_messages,\\n )\\n self.status = messages\\n return messages\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"n_messages\":{\"type\":\"int\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":5,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"n_messages\",\"display_name\":\"Number of Messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"Number of messages to retrieve.\",\"title_case\":false},\"sender\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Machine\",\"User\"],\"name\":\"sender\",\"display_name\":\"Sender 
Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"sender_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"sender_name\",\"display_name\":\"Sender Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"session_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"display_name\":\"Session ID\",\"advanced\":false,\"input_types\":[\"Text\",\"Text\"],\"dynamic\":false,\"info\":\"Session ID of the chat history.\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"Used to retrieve stored messages.\",\"base_classes\":[\"Record\"],\"display_name\":\"Message History\",\"documentation\":\"\",\"custom_fields\":{\"sender\":null,\"sender_name\":null,\"session_id\":null,\"n_messages\":null},\"output_types\":[\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"TextOutput\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass TextOutput(CustomComponent):\\n display_name = \\\"Text Output\\\"\\n description = \\\"Used to pass text output to the next component.\\\"\\n\\n field_config = {\\n \\\"value\\\": {\\\"display_name\\\": \\\"Value\\\"},\\n }\\n\\n def build(self, value: Optional[str] = \\\"\\\") -> Text:\\n self.status = value\\n if not value:\\n value = \\\"\\\"\\n return 
value\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"value\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"value\",\"display_name\":\"Value\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Used to pass text output to the next component.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Text Output\",\"documentation\":\"\",\"custom_fields\":{\"value\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"StoreMessages\":{\"template\":{\"records\":{\"type\":\"Record\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"records\",\"display_name\":\"Records\",\"advanced\":false,\"dynamic\":false,\"info\":\"The list of records to store. Each record should contain the keys 'sender', 'sender_name', and 'session_id'.\",\"title_case\":false},\"texts\":{\"type\":\"Text\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"texts\",\"display_name\":\"Texts\",\"advanced\":false,\"dynamic\":false,\"info\":\"The list of texts to store. 
If records is not provided, texts must be provided.\",\"title_case\":false},\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Optional\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\nfrom langflow.memory import add_messages\\nfrom langflow.schema import Record\\n\\n\\nclass StoreMessages(CustomComponent):\\n display_name = \\\"Store Messages\\\"\\n description = \\\"Used to store messages.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"records\\\": {\\n \\\"display_name\\\": \\\"Records\\\",\\n \\\"info\\\": \\\"The list of records to store. Each record should contain the keys 'sender', 'sender_name', and 'session_id'.\\\",\\n },\\n \\\"texts\\\": {\\n \\\"display_name\\\": \\\"Texts\\\",\\n \\\"info\\\": \\\"The list of texts to store. If records is not provided, texts must be provided.\\\",\\n },\\n \\\"session_id\\\": {\\n \\\"display_name\\\": \\\"Session ID\\\",\\n \\\"info\\\": \\\"The session ID to store.\\\",\\n },\\n \\\"sender\\\": {\\n \\\"display_name\\\": \\\"Sender\\\",\\n \\\"info\\\": \\\"The sender to store.\\\",\\n },\\n \\\"sender_name\\\": {\\n \\\"display_name\\\": \\\"Sender Name\\\",\\n \\\"info\\\": \\\"The sender name to store.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n records: Optional[List[Record]] = None,\\n texts: Optional[List[Text]] = None,\\n session_id: Optional[str] = None,\\n sender: Optional[str] = None,\\n sender_name: Optional[str] = None,\\n ) -> List[Record]:\\n # Records is the main way to store messages\\n # If records is not provided, we can use texts\\n # but we need to create the records from the texts\\n # and the other parameters\\n if not texts and not records:\\n raise ValueError(\\\"Either texts or records must be provided.\\\")\\n\\n if not records:\\n records = []\\n if not session_id or not sender or not sender_name:\\n raise ValueError(\\\"If passing texts, 
session_id, sender, and sender_name must be provided.\\\")\\n for text in texts:\\n record = Record(\\n text=text,\\n data={\\n \\\"session_id\\\": session_id,\\n \\\"sender\\\": sender,\\n \\\"sender_name\\\": sender_name,\\n },\\n )\\n records.append(record)\\n elif isinstance(records, Record):\\n records = [records]\\n\\n self.status = records\\n records = add_messages(records)\\n return records\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"sender\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"sender\",\"display_name\":\"Sender\",\"advanced\":false,\"dynamic\":false,\"info\":\"The sender to store.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"sender_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"sender_name\",\"display_name\":\"Sender Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"The sender name to store.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"session_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"display_name\":\"Session ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"The session ID to store.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Used to store messages.\",\"base_classes\":[\"Record\"],\"display_name\":\"Store 
Messages\",\"documentation\":\"\",\"custom_fields\":{\"records\":null,\"texts\":null,\"session_id\":null,\"sender\":null,\"sender_name\":null},\"output_types\":[\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"ChatInput\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langflow import CustomComponent\\nfrom langflow.schema import Record\\n\\n\\nclass ChatInput(CustomComponent):\\n display_name = \\\"Chat Input\\\"\\n description = \\\"Used to get user input from the chat.\\\"\\n\\n def build_config(self):\\n return {\\n \\\"message\\\": {\\n \\\"input_types\\\": [\\\"Text\\\"],\\n \\\"display_name\\\": \\\"Message\\\",\\n \\\"multiline\\\": True,\\n },\\n \\\"sender\\\": {\\n \\\"options\\\": [\\\"Machine\\\", \\\"User\\\"],\\n \\\"display_name\\\": \\\"Sender Type\\\",\\n },\\n \\\"sender_name\\\": {\\\"display_name\\\": \\\"Sender Name\\\"},\\n \\\"session_id\\\": {\\n \\\"display_name\\\": \\\"Session ID\\\",\\n \\\"info\\\": \\\"Session ID of the chat history.\\\",\\n },\\n \\\"return_record\\\": {\\n \\\"display_name\\\": \\\"Return Record\\\",\\n \\\"info\\\": \\\"Return the message as a record containing the sender, sender_name, and session_id.\\\",\\n },\\n }\\n\\n def build(\\n self,\\n sender: Optional[str] = \\\"User\\\",\\n sender_name: Optional[str] = \\\"User\\\",\\n message: Optional[str] = None,\\n session_id: Optional[str] = None,\\n return_record: Optional[bool] = False,\\n ) -> Record:\\n if return_record:\\n if isinstance(message, Record):\\n # Update the data of the record\\n message.data[\\\"sender\\\"] = sender\\n message.data[\\\"sender_name\\\"] = sender_name\\n message.data[\\\"session_id\\\"] = session_id\\n else:\\n message = Record(\\n text=message,\\n data={\\n \\\"sender\\\": sender,\\n \\\"sender_name\\\": sender_name,\\n \\\"session_id\\\": session_id,\\n },\\n )\\n if not 
message:\\n message = \\\"\\\"\\n self.status = message\\n return message\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"message\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"message\",\"display_name\":\"Message\",\"advanced\":false,\"input_types\":[\"Text\",\"Text\"],\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"return_record\":{\"type\":\"bool\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"return_record\",\"display_name\":\"Return Record\",\"advanced\":false,\"dynamic\":false,\"info\":\"Return the message as a record containing the sender, sender_name, and session_id.\",\"title_case\":false},\"sender\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":true,\"show\":true,\"multiline\":false,\"value\":\"User\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"options\":[\"Machine\",\"User\"],\"name\":\"sender\",\"display_name\":\"Sender Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"sender_name\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"User\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"sender_name\",\"display_name\":\"Sender Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"session_id\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"session_id\",\"display_name\":\"Session ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"Session ID of 
the chat history.\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Used to get user input from the chat.\",\"base_classes\":[\"Record\"],\"display_name\":\"Chat Input\",\"documentation\":\"\",\"custom_fields\":{\"sender\":null,\"sender_name\":null,\"message\":null,\"session_id\":null,\"return_record\":null},\"output_types\":[\"Record\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true},\"TextInput\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from typing import Optional\\n\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Text\\n\\n\\nclass TextInput(CustomComponent):\\n display_name = \\\"Text Input\\\"\\n description = \\\"Used to pass text input to the next component.\\\"\\n\\n field_config = {\\n \\\"value\\\": {\\\"display_name\\\": \\\"Value\\\"},\\n }\\n\\n def build(self, value: Optional[str] = \\\"\\\") -> Text:\\n self.status = value\\n if not value:\\n value = \\\"\\\"\\n return value\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"value\":{\"type\":\"str\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"value\",\"display_name\":\"Value\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"title_case\":false,\"input_types\":[\"Text\"]},\"_type\":\"CustomComponent\"},\"description\":\"Used to pass text input to the next component.\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Text 
Input\",\"documentation\":\"\",\"custom_fields\":{\"value\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}},\"prompts\":{\"Prompt\":{\"template\":{\"code\":{\"type\":\"code\",\"required\":true,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":true,\"value\":\"from langchain_core.prompts import PromptTemplate\\nfrom langflow import CustomComponent\\nfrom langflow.field_typing import Prompt, TemplateField, Text\\n\\n\\nclass PromptComponent(CustomComponent):\\n display_name: str = \\\"Prompt\\\"\\n description: str = \\\"A component for creating prompts using templates\\\"\\n beta = True\\n\\n def build_config(self):\\n return {\\n \\\"template\\\": TemplateField(display_name=\\\"Template\\\"),\\n \\\"code\\\": TemplateField(advanced=True),\\n }\\n\\n def build(\\n self,\\n template: Prompt,\\n **kwargs,\\n ) -> Text:\\n prompt_template = PromptTemplate.from_template(template)\\n\\n attributes_to_check = [\\\"text\\\", \\\"page_content\\\"]\\n for key, value in kwargs.items():\\n for attribute in attributes_to_check:\\n if hasattr(value, attribute):\\n kwargs[key] = getattr(value, attribute)\\n\\n try:\\n formated_prompt = prompt_template.format(**kwargs)\\n except Exception as exc:\\n raise ValueError(f\\\"Error formatting prompt: {exc}\\\") from exc\\n self.status = f'Prompt: \\\"{formated_prompt}\\\"'\\n return formated_prompt\\n\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"dynamic\":true,\"info\":\"\",\"title_case\":false},\"template\":{\"type\":\"prompt\",\"required\":false,\"placeholder\":\"\",\"list\":false,\"show\":true,\"multiline\":false,\"value\":\"\",\"fileTypes\":[],\"file_path\":\"\",\"password\":false,\"name\":\"template\",\"display_name\":\"Template\",\"advanced\":false,\"input_types\":[\"Text\"],\"dynamic\":false,\"info\":\"\",\"title_case\":false},\"_type\":\"CustomComponent\"},\"description\":\"A component for creating prompts using 
templates\",\"base_classes\":[\"Text\",\"object\"],\"display_name\":\"Prompt\",\"documentation\":\"\",\"custom_fields\":{\"template\":null},\"output_types\":[\"Text\"],\"field_formatters\":{},\"pinned\":false,\"beta\":true}}}" }, - "cache": {}, - "timings": { "send": -1, "wait": -1, "receive": 0.743 } - } - ] - } - } \ No newline at end of file + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 2.771 } + }, + { + "startedDateTime": "2024-02-28T14:32:30.976Z", + "time": 0.753, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/store/check/", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + 
"queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "16" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"enabled\":true}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.753 } + }, + { + "startedDateTime": "2024-02-28T14:32:30.976Z", + "time": 0.525, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/store/check/api_key", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) 
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "38" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"has_api_key\":false,\"is_valid\":false}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.525 } + }, + { + "startedDateTime": "2024-02-28T14:32:30.976Z", + "time": 0.658, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/auto_login", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { 
"name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "227" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" }, + { "name": "set-cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc; Path=/; SameSite=none; Secure" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"access_token\":\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc\",\"refresh_token\":null,\"token_type\":\"bearer\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.658 } + }, + { + "startedDateTime": "2024-02-28T14:32:31.023Z", + "time": 0.787, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/users/whoami", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": 
"Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "253" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:30 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": 
"{\"id\":\"0389fb29-daa6-408c-b8cb-b8ff8d17343a\",\"username\":\"langflow\",\"profile_image\":null,\"is_active\":true,\"is_superuser\":true,\"create_at\":\"2024-02-28T14:31:41.362911\",\"updated_at\":\"2024-02-28T14:32:30.982826\",\"last_login_at\":\"2024-02-28T14:32:30.982478\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.787 } + }, + { + "startedDateTime": "2024-02-28T14:32:32.479Z", + "time": 0.836, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/flows/", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/flows" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, 
+ "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "2" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:31 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "[]" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.836 } + }, + { + "startedDateTime": "2024-02-28T14:32:36.617Z", + "time": 1.679, + "request": { + "method": "POST", + "url": "http://localhost:3000/api/v1/flows/", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Content-Length", "value": "170" }, + { "name": "Content-Type", "value": "application/json" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Origin", "value": "http://localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/flows" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { 
"name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1, + "postData": { + "mimeType": "application/json", + "text": "{\"name\":\"Untitled document\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"description\":\"Harness the Power of Conversational AI.\",\"is_component\":false}", + "params": [] + } + }, + "response": { + "status": 201, + "statusText": "Created", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "access-control-allow-credentials", "value": "true" }, + { "name": "access-control-allow-origin", "value": "http://localhost:3000" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "319" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:35 GMT" }, + { "name": "server", "value": "uvicorn" }, + { "name": "vary", "value": "Origin" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"name\":\"Untitled document\",\"description\":\"Harness the Power of Conversational AI.\",\"data\":{\"nodes\":[],\"edges\":[],\"viewport\":{\"zoom\":1,\"x\":0,\"y\":0}},\"is_component\":false,\"updated_at\":\"2024-02-28T14:32:36.621261\",\"folder\":null,\"id\":\"b3aad40d-cf3b-49d2-804f-bfa33b70beef\",\"user_id\":\"0389fb29-daa6-408c-b8cb-b8ff8d17343a\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 1.679 } + }, + { + "startedDateTime": "2024-02-28T14:32:36.774Z", + "time": 1.023, + "request": { + "method": "GET", + "url": 
"http://localhost:3000/api/v1/monitor/builds?flow_id=b3aad40d-cf3b-49d2-804f-bfa33b70beef", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/flow/b3aad40d-cf3b-49d2-804f-bfa33b70beef" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [ + { + "name": "flow_id", + "value": "b3aad40d-cf3b-49d2-804f-bfa33b70beef" + } + ], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "20" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 
2024 14:32:35 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"vertex_builds\":{}}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 1.023 } + }, + { + "startedDateTime": "2024-02-28T14:32:49.526Z", + "time": 0.977, + "request": { + "method": "DELETE", + "url": "http://localhost:3000/api/v1/flows/b3aad40d-cf3b-49d2-804f-bfa33b70beef", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br, zstd" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token_lf=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIwMzg5ZmIyOS1kYWE2LTQwOGMtYjhjYi1iOGZmOGQxNzM0M2EiLCJleHAiOjE3NDA2NjY3NTB9.ef5W5jwNOeVzU3JZ7ylLYf2MLEJcVxC4-fF7EK9Ecdc" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Origin", "value": "http://localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/flow/b3aad40d-cf3b-49d2-804f-bfa33b70beef" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Chromium\";v=\"123\", \"Not:A-Brand\";v=\"8\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + 
"bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "access-control-allow-credentials", "value": "true" }, + { "name": "access-control-allow-origin", "value": "http://localhost:3000" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "39" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Wed, 28 Feb 2024 14:32:48 GMT" }, + { "name": "server", "value": "uvicorn" }, + { "name": "vary", "value": "Origin" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"message\":\"Flow deleted successfully\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.977 } + } + ] + } +} \ No newline at end of file diff --git a/src/frontend/index.html b/src/frontend/index.html index 8c21f0124..d4b03effb 100644 --- a/src/frontend/index.html +++ b/src/frontend/index.html @@ -1,16 +1,19 @@ - - - - + + + + - + Langflow - - + + -
+
- + diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json index 2bfc05143..971c9eee1 100644 --- a/src/frontend/package-lock.json +++ b/src/frontend/package-lock.json @@ -8,12 +8,8 @@ "name": "langflow", "version": "0.1.2", "dependencies": { - "@emotion/react": "^11.11.1", - "@emotion/styled": "^11.11.0", "@headlessui/react": "^1.7.17", - "@heroicons/react": "^2.0.18", - "@mui/material": "^5.14.7", - "@preact/signals-react": "^2.0.0", + "@million/lint": "^0.0.73", "@radix-ui/react-accordion": "^1.1.2", "@radix-ui/react-checkbox": "^1.0.4", "@radix-ui/react-dialog": "^1.0.4", @@ -24,7 +20,7 @@ "@radix-ui/react-menubar": "^1.0.3", "@radix-ui/react-popover": "^1.0.6", "@radix-ui/react-progress": "^1.0.3", - "@radix-ui/react-select": "^1.2.2", + "@radix-ui/react-select": "^2.0.0", "@radix-ui/react-separator": "^1.0.3", "@radix-ui/react-slot": "^1.0.2", "@radix-ui/react-switch": "^1.0.3", @@ -34,31 +30,31 @@ "@tailwindcss/forms": "^0.5.6", "@tailwindcss/line-clamp": "^0.4.4", "@types/axios": "^0.14.0", - "accordion": "^3.0.2", "ace-builds": "^1.24.1", - "add": "^2.0.6", "ansi-to-html": "^0.7.2", "axios": "^1.5.0", "base64-js": "^1.5.1", "class-variance-authority": "^0.6.1", "clsx": "^1.2.1", + "cmdk": "^1.0.0", "dompurify": "^3.0.5", "esbuild": "^0.17.19", + "framer-motion": "^11.0.6", "lodash": "^4.17.21", - "lucide-react": "^0.233.0", + "lucide-react": "^0.331.0", + "million": "^3.0.6", "moment": "^2.29.4", + "playwright": "^1.42.0", "react": "^18.2.0", "react-ace": "^10.1.0", "react-cookie": "^4.1.1", "react-dom": "^18.2.0", "react-error-boundary": "^4.0.11", - "react-icons": "^4.10.1", + "react-icons": "^5.0.1", "react-laag": "^2.0.5", "react-markdown": "^8.0.7", "react-router-dom": "^6.15.0", "react-syntax-highlighter": "^15.5.0", - "react-tabs": "^6.0.2", - "react-tooltip": "^5.21.1", "react18-json-view": "^0.2.3", "reactflow": "^11.9.2", "rehype-mathjax": "^4.0.3", @@ -66,8 +62,6 @@ "remark-math": "^5.1.1", "shadcn-ui": "^0.2.3", 
"short-unique-id": "^4.4.4", - "switch": "^0.0.0", - "table": "^6.8.1", "tailwind-merge": "^1.14.0", "tailwindcss-animate": "^1.0.7", "uuid": "^9.0.0", @@ -76,7 +70,7 @@ "zustand": "^4.4.7" }, "devDependencies": { - "@playwright/test": "^1.38.0", + "@playwright/test": "^1.42.0", "@swc/cli": "^0.1.62", "@swc/core": "^1.3.80", "@tailwindcss/typography": "^0.5.9", @@ -92,16 +86,29 @@ "@vitejs/plugin-react-swc": "^3.3.2", "autoprefixer": "^10.4.15", "daisyui": "^4.0.4", + "eslint": "^8.57.0", + "eslint-plugin-node": "^11.1.0", "postcss": "^8.4.29", "prettier": "^2.8.8", "prettier-plugin-organize-imports": "^3.2.3", "prettier-plugin-tailwindcss": "^0.3.0", "pretty-quick": "^3.1.3", "tailwindcss": "^3.3.3", + "tailwindcss-dotted-background": "^1.1.0", "typescript": "^5.2.2", + "ua-parser-js": "^1.0.37", "vite": "^4.5.2" } }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@adobe/css-tools": { "version": "4.3.3", "resolved": "https://registry.npmjs.org/@adobe/css-tools/-/css-tools-4.3.3.tgz", @@ -120,12 +127,12 @@ } }, "node_modules/@ampproject/remapping": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", - "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", "dependencies": { - "@jridgewell/gen-mapping": "^0.3.0", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" }, 
"engines": { "node": ">=6.0.0" @@ -146,104 +153,40 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.23.5.tgz", - "integrity": "sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==", + "version": "7.24.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.2.tgz", + "integrity": "sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==", "dependencies": { - "@babel/highlight": "^7.23.4", - "chalk": "^2.4.2" + "@babel/highlight": "^7.24.2", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/code-frame/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/code-frame/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", 
- "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "node_modules/@babel/code-frame/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/code-frame/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/code-frame/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/compat-data": { - "version": "7.23.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.23.5.tgz", - "integrity": "sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.1.tgz", + "integrity": "sha512-Pc65opHDliVpRHuKfzI+gSA4zcgr65O4cl64fFJIWEEh8JoHIHh0Oez1Eo8Arz8zq/JhgKodQaxEwUPRtZylVA==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.9.tgz", - "integrity": "sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw==", + "version": "7.24.3", + "resolved": 
"https://registry.npmjs.org/@babel/core/-/core-7.24.3.tgz", + "integrity": "sha512-5FcvN1JHw2sHJChotgx8Ek0lyuh4kCKelgMTTqhYJJtloNvUfpAFMeNQUtdlIaktwrSV9LtCdqwk48wL2wBacQ==", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.23.5", - "@babel/generator": "^7.23.6", + "@babel/code-frame": "^7.24.2", + "@babel/generator": "^7.24.1", "@babel/helper-compilation-targets": "^7.23.6", "@babel/helper-module-transforms": "^7.23.3", - "@babel/helpers": "^7.23.9", - "@babel/parser": "^7.23.9", - "@babel/template": "^7.23.9", - "@babel/traverse": "^7.23.9", - "@babel/types": "^7.23.9", + "@babel/helpers": "^7.24.1", + "@babel/parser": "^7.24.1", + "@babel/template": "^7.24.0", + "@babel/traverse": "^7.24.1", + "@babel/types": "^7.24.0", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -258,27 +201,14 @@ "url": "https://opencollective.com/babel" } }, - "node_modules/@babel/core/node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" - }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, "node_modules/@babel/generator": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.6.tgz", - "integrity": "sha512-qrSfCYxYQB5owCmGLbl8XRpX1ytXlpueOb0N0UmQwA073KZxejgQTzAmJezxvpwQD9uGtK2shHdi55QT+MbjIw==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.1.tgz", + "integrity": "sha512-DfCRfZsBcrPEHUfuBMgbJ1Ut01Y/itOs+hY2nFLgqsqXd52/iSiVq5TITtUasIUgm+IIKdY2/1I7auiQOEeC9A==", 
"dependencies": { - "@babel/types": "^7.23.6", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", + "@babel/types": "^7.24.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" }, "engines": { @@ -300,27 +230,6 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" - }, "node_modules/@babel/helper-environment-visitor": { "version": "7.22.20", "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", @@ -353,11 +262,11 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.22.15", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz", - "integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", + "version": "7.24.3", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.3.tgz", + "integrity": 
"sha512-viKb0F9f2s0BCS22QSF308z/+1YWKV/76mwt61NBzS5izMzDPwdq1pTrzf+Li3npBWX9KdQbkeCt1jSAM7lZqg==", "dependencies": { - "@babel/types": "^7.22.15" + "@babel/types": "^7.24.0" }, "engines": { "node": ">=6.9.0" @@ -404,9 +313,9 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz", - "integrity": "sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.1.tgz", + "integrity": "sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==", "engines": { "node": ">=6.9.0" } @@ -428,99 +337,36 @@ } }, "node_modules/@babel/helpers": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.9.tgz", - "integrity": "sha512-87ICKgU5t5SzOT7sBMfCOZQ2rHjRU+Pcb9BoILMYz600W6DkVRLFBPwQ18gwUVvggqXivaUakpnxWQGbpywbBQ==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.1.tgz", + "integrity": "sha512-BpU09QqEe6ZCHuIHFphEFgvNSrubve1FtyMton26ekZ85gRGi6LrTF7zArARp2YvyFxloeiRmtSCq5sjh1WqIg==", "dependencies": { - "@babel/template": "^7.23.9", - "@babel/traverse": "^7.23.9", - "@babel/types": "^7.23.9" + "@babel/template": "^7.24.0", + "@babel/traverse": "^7.24.1", + "@babel/types": "^7.24.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.23.4", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.23.4.tgz", - "integrity": "sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==", + "version": "7.24.2", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.2.tgz", + "integrity": 
"sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==", "dependencies": { "@babel/helper-validator-identifier": "^7.22.20", "chalk": "^2.4.2", - "js-tokens": "^4.0.0" + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/highlight/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@babel/highlight/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - }, - "node_modules/@babel/highlight/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": 
"sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@babel/highlight/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "engines": { - "node": ">=4" - } - }, - "node_modules/@babel/highlight/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/@babel/parser": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.9.tgz", - "integrity": "sha512-9tcKgqKbs3xGJ+NtKF2ndOBBLVwPjl1SHxPQkd36r3Dlirw3xWUeGaTbqr7uGZcTaxkVNwc+03SVP7aCdWrTlA==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.1.tgz", + "integrity": "sha512-Zo9c7N3xdOIQrNip7Lc9wvRPzlRtovHVE4lkz8WEDr7uYh/GMQhSiIgFxGIArRHYdJE5kxtZjAf8rT0xhdLCzg==", "bin": { "parser": "bin/babel-parser.js" }, @@ -529,9 +375,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.9.tgz", - "integrity": "sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.1.tgz", + "integrity": "sha512-+BIznRzyqBf+2wCTxcKE3wDjfGeCoVE61KSHGpkzqrLi8qxqFwBeUFyId2cxkTmm55fzDGnm0+yCxaxygrLUnQ==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -540,31 +386,31 @@ } }, "node_modules/@babel/template": { - "version": "7.23.9", - "resolved": 
"https://registry.npmjs.org/@babel/template/-/template-7.23.9.tgz", - "integrity": "sha512-+xrD2BWLpvHKNmX2QbpdpsBaWnRxahMwJjO+KZk2JOElj5nSmKezyS1B4u+QbHMTX69t4ukm6hh9lsYQ7GHCKA==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", + "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", "dependencies": { "@babel/code-frame": "^7.23.5", - "@babel/parser": "^7.23.9", - "@babel/types": "^7.23.9" + "@babel/parser": "^7.24.0", + "@babel/types": "^7.24.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.9.tgz", - "integrity": "sha512-I/4UJ9vs90OkBtY6iiiTORVMyIhJ4kAVmsKo9KFc8UOxMeUfi2hvtIBsET5u9GizXE6/GFSuKCTNfgCswuEjRg==", + "version": "7.24.1", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.1.tgz", + "integrity": "sha512-xuU6o9m68KeqZbQuDt2TcKSxUw/mrsvavlEqQ1leZ/B+C9tk6E4sRWy97WaXgvq5E+nU3cXMxv3WKOCanVMCmQ==", "dependencies": { - "@babel/code-frame": "^7.23.5", - "@babel/generator": "^7.23.6", + "@babel/code-frame": "^7.24.1", + "@babel/generator": "^7.24.1", "@babel/helper-environment-visitor": "^7.22.20", "@babel/helper-function-name": "^7.23.0", "@babel/helper-hoist-variables": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.23.9", - "@babel/types": "^7.23.9", + "@babel/parser": "^7.24.1", + "@babel/types": "^7.24.0", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -573,9 +419,9 @@ } }, "node_modules/@babel/types": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.9.tgz", - "integrity": "sha512-dQjSq/7HaSjRM43FFGnv5keM2HsxpmyV1PfaSVm0nzzjwwTmjOe6J4bC8e3+pTEIgHaHj+1ZlLThRJ2auc/w1Q==", + "version": "7.24.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.0.tgz", + "integrity": 
"sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==", "dependencies": { "@babel/helper-string-parser": "^7.23.4", "@babel/helper-validator-identifier": "^7.22.20", @@ -585,139 +431,21 @@ "node": ">=6.9.0" } }, - "node_modules/@emotion/babel-plugin": { - "version": "11.11.0", - "resolved": "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz", - "integrity": "sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==", - "dependencies": { - "@babel/helper-module-imports": "^7.16.7", - "@babel/runtime": "^7.18.3", - "@emotion/hash": "^0.9.1", - "@emotion/memoize": "^0.8.1", - "@emotion/serialize": "^1.1.2", - "babel-plugin-macros": "^3.1.0", - "convert-source-map": "^1.5.0", - "escape-string-regexp": "^4.0.0", - "find-root": "^1.1.0", - "source-map": "^0.5.7", - "stylis": "4.2.0" + "node_modules/@esbuild/aix-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" } }, - "node_modules/@emotion/cache": { - "version": "11.11.0", - "resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz", - "integrity": "sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ==", - "dependencies": { - "@emotion/memoize": "^0.8.1", - "@emotion/sheet": "^1.2.2", - "@emotion/utils": "^1.2.1", - "@emotion/weak-memoize": "^0.3.1", - "stylis": "4.2.0" - } - }, - "node_modules/@emotion/hash": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz", - "integrity": "sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ==" - }, - "node_modules/@emotion/is-prop-valid": { - "version": 
"1.2.1", - "resolved": "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz", - "integrity": "sha512-61Mf7Ufx4aDxx1xlDeOm8aFFigGHE4z+0sKCa+IHCeZKiyP9RLD0Mmx7m8b9/Cf37f7NAvQOOJAbQQGVr5uERw==", - "dependencies": { - "@emotion/memoize": "^0.8.1" - } - }, - "node_modules/@emotion/memoize": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz", - "integrity": "sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA==" - }, - "node_modules/@emotion/react": { - "version": "11.11.3", - "resolved": "https://registry.npmjs.org/@emotion/react/-/react-11.11.3.tgz", - "integrity": "sha512-Cnn0kuq4DoONOMcnoVsTOR8E+AdnKFf//6kUWc4LCdnxj31pZWn7rIULd6Y7/Js1PiPHzn7SKCM9vB/jBni8eA==", - "dependencies": { - "@babel/runtime": "^7.18.3", - "@emotion/babel-plugin": "^11.11.0", - "@emotion/cache": "^11.11.0", - "@emotion/serialize": "^1.1.3", - "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1", - "@emotion/utils": "^1.2.1", - "@emotion/weak-memoize": "^0.3.1", - "hoist-non-react-statics": "^3.3.1" - }, - "peerDependencies": { - "react": ">=16.8.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@emotion/serialize": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.3.tgz", - "integrity": "sha512-iD4D6QVZFDhcbH0RAG1uVu1CwVLMWUkCvAqqlewO/rxf8+87yIBAlt4+AxMiiKPLs5hFc0owNk/sLLAOROw3cA==", - "dependencies": { - "@emotion/hash": "^0.9.1", - "@emotion/memoize": "^0.8.1", - "@emotion/unitless": "^0.8.1", - "@emotion/utils": "^1.2.1", - "csstype": "^3.0.2" - } - }, - "node_modules/@emotion/sheet": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz", - "integrity": "sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA==" - }, - "node_modules/@emotion/styled": { - "version": "11.11.0", - 
"resolved": "https://registry.npmjs.org/@emotion/styled/-/styled-11.11.0.tgz", - "integrity": "sha512-hM5Nnvu9P3midq5aaXj4I+lnSfNi7Pmd4EWk1fOZ3pxookaQTNew6bp4JaCBYM4HVFZF9g7UjJmsUmC2JlxOng==", - "dependencies": { - "@babel/runtime": "^7.18.3", - "@emotion/babel-plugin": "^11.11.0", - "@emotion/is-prop-valid": "^1.2.1", - "@emotion/serialize": "^1.1.2", - "@emotion/use-insertion-effect-with-fallbacks": "^1.0.1", - "@emotion/utils": "^1.2.1" - }, - "peerDependencies": { - "@emotion/react": "^11.0.0-rc.0", - "react": ">=16.8.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@emotion/unitless": { - "version": "0.8.1", - "resolved": "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz", - "integrity": "sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ==" - }, - "node_modules/@emotion/use-insertion-effect-with-fallbacks": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz", - "integrity": "sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw==", - "peerDependencies": { - "react": ">=16.8.0" - } - }, - "node_modules/@emotion/utils": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz", - "integrity": "sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg==" - }, - "node_modules/@emotion/weak-memoize": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz", - "integrity": "sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww==" - }, "node_modules/@esbuild/android-arm": { "version": "0.17.19", "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.19.tgz", @@ -1048,6 +776,99 @@ "node": ">=12" } }, + 
"node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.10.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz", + "integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.6.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "13.24.0", + "resolved": 
"https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@eslint/eslintrc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@eslint/js": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@floating-ui/core": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.0.tgz", @@ -1057,12 +878,12 @@ } }, "node_modules/@floating-ui/dom": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.1.tgz", - "integrity": "sha512-iA8qE43/H5iGozC3W0YSnVSW42Vh522yyM1gj+BqRwVsTNOyr231PsXDaV04yT39PsO0QL2QpbI/M0ZaLUQgRQ==", + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz", + "integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==", "dependencies": { - "@floating-ui/core": "^1.6.0", - "@floating-ui/utils": "^0.2.1" + "@floating-ui/core": "^1.0.0", + "@floating-ui/utils": "^0.2.0" } }, "node_modules/@floating-ui/react-dom": { @@ -1098,14 +919,61 @@ "react-dom": "^16 || ^17 || ^18" } }, - "node_modules/@heroicons/react": { - "version": "2.1.1", - 
"resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.1.1.tgz", - "integrity": "sha512-JyyN9Lo66kirbCMuMMRPtJxtKJoIsXKS569ebHGGRKbl8s4CtUfLnyKJxteA+vIKySocO4s1SkTkGS4xtG/yEA==", - "peerDependencies": { - "react": ">= 16" + "node_modules/@humanwhocodes/config-array": { + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", + "minimatch": "^3.0.5" + }, + "engines": { + "node": ">=10.10.0" } }, + "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/@humanwhocodes/config-array/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "2.0.3", + 
"resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "dev": true + }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -1122,52 +990,56 @@ "node": ">=12" } }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "ansi-regex": "^6.0.1" }, "engines": { "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", "dependencies": { - "@jridgewell/set-array": "^1.0.1", + "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/trace-mapping": "^0.3.24" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz", - "integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", "engines": { "node": ">=6.0.0" } @@ -1178,14 +1050,653 @@ "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.22", - "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.22.tgz", - "integrity": "sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw==", + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@million/lint": { + "version": "0.0.73", + "resolved": "https://registry.npmjs.org/@million/lint/-/lint-0.0.73.tgz", + "integrity": "sha512-UR1VR/GorYt5bRKBtNeS2ZWj6PZk8RVpwV7WDjWmdbLqLAYv4JlRnkPAImZbJR5R50jsHpopmcqqm4mcbyZwiw==", + "dependencies": { + "@babel/core": "^7.23.7", + "@babel/helper-module-imports": "^7.22.15", + "@babel/types": "^7.23.6", + "@radix-ui/react-dialog": "^1.0.5", + "@radix-ui/react-tooltip": "^1.0.7", + "@rollup/pluginutils": "^5.1.0", + "cmdk": "^0.2.1", + "esbuild": "^0.20.1", + "escalade": "^3.1.2", + "isomorphic-fetch": "^3.0.0", + "posthog-node": "^3.6.3", + "react": "^18", + "react-dom": "^18", + "react-draggable": "^4.4.6", + "react-reconciler": "^0.29.0", + "unplugin": "^1.6.0", + "zustand": "^4.5.2" + }, + "bin": { + "lint": "dist/wizard/index.js" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/android-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/android-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": 
"sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/android-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/darwin-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/darwin-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/freebsd-x64": { + "version": "0.20.2", + "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", + "cpu": [ + "arm" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-loong64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", + "cpu": [ + "loong64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-mips64el": { + 
"version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", + "cpu": [ + "mips64el" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", + "cpu": [ + "ppc64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-riscv64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", + "cpu": [ + "riscv64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-s390x": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", + "cpu": [ + "s390x" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/linux-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@million/lint/node_modules/@esbuild/netbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/openbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/sunos-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/win32-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", + "cpu": [ + "arm64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/win32-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", + "cpu": [ + "ia32" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + 
"node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@esbuild/win32-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", + "cpu": [ + "x64" + ], + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/primitive": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.0.0.tgz", + "integrity": "sha512-3e7rn8FDMin4CgeL7Z/49smCA3rFYY3Ha2rUQ7HRWFadS5iCRw08ZgVT1LaNTCNqgvrUiyczLflrVrF0SRQtNA==", + "dependencies": { + "@babel/runtime": "^7.13.10" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-compose-refs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.0.tgz", + "integrity": "sha512-0KaSv6sx787/hK3eF53iOkiSLwAGlFMx5lotrqD2pTjB18KbybKoEIgkNZTKC60YECDQTKGTRcDBILwZVqVKvA==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-context": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.0.0.tgz", + "integrity": "sha512-1pVM9RfOQ+n/N5PJK33kRSKsr1glNxomxONs5c49MliinBY6Yw2Q995qfBUUo0/Mbg05B/sGA0gkgPI7kmSHBg==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.0.tgz", + "integrity": "sha512-n7kDRfx+LB1zLueRDvZ1Pd0bxdJWDUZNQ/GWoxDn2prnuJKRdxsjulejX/ePkOsLi2tTm6P24mDqlMSgQpsT6g==", + "dependencies": { + 
"@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.0", + "@radix-ui/react-compose-refs": "1.0.0", + "@radix-ui/react-primitive": "1.0.0", + "@radix-ui/react-use-callback-ref": "1.0.0", + "@radix-ui/react-use-escape-keydown": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-focus-guards": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.0.0.tgz", + "integrity": "sha512-UagjDk4ijOAnGu4WMUPj9ahi7/zJJqNZ9ZAiGPp7waUWJO0O1aWXi/udPphI0IUjvrhBsZJGSN66dR2dsueLWQ==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-focus-scope": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.0.tgz", + "integrity": "sha512-C4SWtsULLGf/2L4oGeIHlvWQx7Rf+7cX/vKOAD2dXW0A1b5QXwi3wWeaEgW+wn+SEVrraMUk05vLU9fZZz5HbQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.0", + "@radix-ui/react-primitive": "1.0.0", + "@radix-ui/react-use-callback-ref": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-id": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.0.0.tgz", + "integrity": "sha512-Q6iAB/U7Tq3NTolBBQbHTgclPmGWE3OlktGGqrClPozSw4vkQ1DfQAOtzgRPecKsMdJINE05iaoDUG8tRzCBjw==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-layout-effect": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-portal": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.0.tgz", + "integrity": "sha512-a8qyFO/Xb99d8wQdu4o7qnigNjTPG123uADNecz0eX4usnQEj7o+cG4ZX4zkqq98NYekT7UoEQIjxBNWIFuqTA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-presence": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.0.0.tgz", + "integrity": "sha512-A+6XEvN01NfVWiKu38ybawfHsBjWum42MRPnEuqPsBZ4eV7e/7K321B5VgYMPv3Xx5An6o1/l9ZuDBgmcmWK3w==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.0", + "@radix-ui/react-use-layout-effect": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-primitive": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-1.0.0.tgz", + "integrity": "sha512-EyXe6mnRlHZ8b6f4ilTDrXmkLShICIuOTTj0GX4w1rp+wSxf3+TD05u1UOITC8VsJ2a9nwHvdXtOXEOl0Cw/zQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-slot": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-slot": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.0.0.tgz", + "integrity": "sha512-3mrKauI/tWXo1Ll+gN5dHcxDPdm/Df1ufcDLCecn+pnCIVcdWE7CujXo8QaXOWRJyZyQWWbpB8eFwHzWXlv5mQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + 
"node_modules/@million/lint/node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.0.0.tgz", + "integrity": "sha512-GZtyzoHz95Rhs6S63D2t/eqvdFCm7I+yHMLVQheKM7nBD8mbZIt+ct1jz4536MDnaOGKIxynJ8eHTkVGVVkoTg==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.0.0.tgz", + "integrity": "sha512-FohDoZvk3mEXh9AWAVyRTYR4Sq7/gavuofglmiXB2g1aKyboUD4YtgWxKj8O5n+Uak52gXQ4wKz5IFST4vtJHg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-callback-ref": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.0.0.tgz", + "integrity": "sha512-JwfBCUIfhXRxKExgIqGa4CQsiMemo1Xt0W/B4ei3fpzpvPENKpMKQ8mZSB6Acj3ebrAEgi2xiQvcI1PAAodvyg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-use-callback-ref": "1.0.0" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.0.0.tgz", + "integrity": "sha512-6Tpkq+R6LOlmQb1R5NNETLG0B4YP0wc+klfXafpUCj6JGyaUc8il7/kUZ7m59rGbXGczE9Bs+iz2qloqsZBduQ==", + "dependencies": { + "@babel/runtime": "^7.13.10" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/cmdk": { + "version": 
"0.2.1", + "resolved": "https://registry.npmjs.org/cmdk/-/cmdk-0.2.1.tgz", + "integrity": "sha512-U6//9lQ6JvT47+6OF6Gi8BvkxYQ8SCRRSKIJkthIMsFsLZRG0cKvTtuTaefyIKMQb8rvvXy0wGdpTNq/jPtm+g==", + "dependencies": { + "@radix-ui/react-dialog": "1.0.0" + }, + "peerDependencies": { + "react": "^18.0.0", + "react-dom": "^18.0.0" + } + }, + "node_modules/@million/lint/node_modules/cmdk/node_modules/@radix-ui/react-dialog": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.0.tgz", + "integrity": "sha512-Yn9YU+QlHYLWwV1XfKiqnGVpWYWk6MeBVM6x/bcoyPvxgjQGoeT35482viLPctTMWoMw0PoHgqfSox7Ig+957Q==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.0", + "@radix-ui/react-compose-refs": "1.0.0", + "@radix-ui/react-context": "1.0.0", + "@radix-ui/react-dismissable-layer": "1.0.0", + "@radix-ui/react-focus-guards": "1.0.0", + "@radix-ui/react-focus-scope": "1.0.0", + "@radix-ui/react-id": "1.0.0", + "@radix-ui/react-portal": "1.0.0", + "@radix-ui/react-presence": "1.0.0", + "@radix-ui/react-primitive": "1.0.0", + "@radix-ui/react-slot": "1.0.0", + "@radix-ui/react-use-controllable-state": "1.0.0", + "aria-hidden": "^1.1.1", + "react-remove-scroll": "2.5.4" + }, + "peerDependencies": { + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + } + }, + "node_modules/@million/lint/node_modules/esbuild": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + 
"@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" + } + }, + "node_modules/@million/lint/node_modules/react-remove-scroll": { + "version": "2.5.4", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.4.tgz", + "integrity": "sha512-xGVKJJr0SJGQVirVFAUZ2k1QLyO6m+2fy0l8Qawbp5Jgrv3DeLalrfMNBFSlmz5kriGGzsVBtGVnf4pTKIhhWA==", + "dependencies": { + "react-remove-scroll-bar": "^2.3.3", + "react-style-singleton": "^2.2.1", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.0", + "use-sidecar": "^1.1.2" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "^16.8.0 || ^17.0.0 || ^18.0.0", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, "node_modules/@mole-inc/bin-wrapper": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/@mole-inc/bin-wrapper/-/bin-wrapper-8.0.1.tgz", @@ -1205,250 +1716,6 @@ "node": "^12.20.0 || ^14.13.1 || >=16.0.0" } }, - "node_modules/@mui/base": { - "version": "5.0.0-beta.34", - "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.34.tgz", - "integrity": "sha512-e2mbTGTtReD/y5RFwnhkl1Tgl3XwgJhY040IlfkTVaU9f5LWrVhEnpRsYXu3B1CtLrwiWs4cu7aMHV9yRd4jpw==", - "dependencies": { - "@babel/runtime": "^7.23.9", - "@floating-ui/react-dom": "^2.0.8", - "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.7", - "@popperjs/core": "^2.11.8", - "clsx": 
"^2.1.0", - "prop-types": "^15.8.1" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - }, - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0", - "react": "^17.0.0 || ^18.0.0", - "react-dom": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@mui/base/node_modules/clsx": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", - "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", - "engines": { - "node": ">=6" - } - }, - "node_modules/@mui/core-downloads-tracker": { - "version": "5.15.7", - "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.15.7.tgz", - "integrity": "sha512-AuF+Wo2Mp/edaO6vJnWjg+gj4tzEz5ChMZnAQpc22DXpSvM8ddgGcZvM7D7F99pIBoSv8ub+Iz0viL+yuGVmhg==", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - } - }, - "node_modules/@mui/material": { - "version": "5.15.7", - "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.15.7.tgz", - "integrity": "sha512-l6+AiKZH3iOJmZCnlpel8ghYQe9Lq0BEuKP8fGj3g5xz4arO9GydqYAtLPMvuHKtArj8lJGNuT2yHYxmejincA==", - "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/base": "5.0.0-beta.34", - "@mui/core-downloads-tracker": "^5.15.7", - "@mui/system": "^5.15.7", - "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.7", - "@types/react-transition-group": "^4.4.10", - "clsx": "^2.1.0", - "csstype": "^3.1.2", - "prop-types": "^15.8.1", - "react-is": "^18.2.0", - "react-transition-group": "^4.4.5" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - }, - "peerDependencies": { - "@emotion/react": "^11.5.0", - "@emotion/styled": "^11.3.0", - "@types/react": "^17.0.0 || ^18.0.0", - "react": "^17.0.0 
|| ^18.0.0", - "react-dom": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@emotion/react": { - "optional": true - }, - "@emotion/styled": { - "optional": true - }, - "@types/react": { - "optional": true - } - } - }, - "node_modules/@mui/material/node_modules/clsx": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", - "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", - "engines": { - "node": ">=6" - } - }, - "node_modules/@mui/private-theming": { - "version": "5.15.7", - "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.15.7.tgz", - "integrity": "sha512-bcEeeXm7GyQCQvN9dwo8htGv8/6tP05p0i02Z7GXm5EoDPlBcqTNGugsjNLoGq6B0SsdyanjJGw0Jw00o1yAOA==", - "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/utils": "^5.15.7", - "prop-types": "^15.8.1" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - }, - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0", - "react": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@mui/styled-engine": { - "version": "5.15.7", - "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.15.7.tgz", - "integrity": "sha512-ixSdslOjK1kzdGcxqj7O3d14By/LPQ7EWknsViQ8RaeT863EAQemS+zvUJDTcOpkfJh6q6gPnYMIb2TJCs9eWA==", - "dependencies": { - "@babel/runtime": "^7.23.9", - "@emotion/cache": "^11.11.0", - "csstype": "^3.1.2", - "prop-types": "^15.8.1" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - }, - "peerDependencies": { - "@emotion/react": "^11.4.1", - "@emotion/styled": "^11.3.0", - "react": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@emotion/react": { - "optional": true - }, - "@emotion/styled": { - "optional": true - } 
- } - }, - "node_modules/@mui/system": { - "version": "5.15.7", - "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.15.7.tgz", - "integrity": "sha512-9alZ4/dLxsTwUOdqakgzxiL5YW6ntqj0CfzWImgWnBMTZhgGcPsbYpBLniNkkk7/jptma4/bykWXHwju/ls/pg==", - "dependencies": { - "@babel/runtime": "^7.23.9", - "@mui/private-theming": "^5.15.7", - "@mui/styled-engine": "^5.15.7", - "@mui/types": "^7.2.13", - "@mui/utils": "^5.15.7", - "clsx": "^2.1.0", - "csstype": "^3.1.2", - "prop-types": "^15.8.1" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - }, - "peerDependencies": { - "@emotion/react": "^11.5.0", - "@emotion/styled": "^11.3.0", - "@types/react": "^17.0.0 || ^18.0.0", - "react": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@emotion/react": { - "optional": true - }, - "@emotion/styled": { - "optional": true - }, - "@types/react": { - "optional": true - } - } - }, - "node_modules/@mui/system/node_modules/clsx": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", - "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", - "engines": { - "node": ">=6" - } - }, - "node_modules/@mui/types": { - "version": "7.2.13", - "resolved": "https://registry.npmjs.org/@mui/types/-/types-7.2.13.tgz", - "integrity": "sha512-qP9OgacN62s+l8rdDhSFRe05HWtLLJ5TGclC9I1+tQngbssu0m2dmFZs+Px53AcOs9fD7TbYd4gc9AXzVqO/+g==", - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@mui/utils": { - "version": "5.15.7", - "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.15.7.tgz", - "integrity": "sha512-8qhsxQRNV6aEOjjSk6YQIYJxkF5klhj8oG1FEEU4z6HV78TjNqRxMP08QGcdsibEbez+nihAaz6vu83b4XqbAg==", - "dependencies": { - "@babel/runtime": "^7.23.9", - "@types/prop-types": "^15.7.11", - 
"prop-types": "^15.8.1", - "react-is": "^18.2.0" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mui-org" - }, - "peerDependencies": { - "@types/react": "^17.0.0 || ^18.0.0", - "react": "^17.0.0 || ^18.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -1491,12 +1758,12 @@ } }, "node_modules/@playwright/test": { - "version": "1.41.2", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.41.2.tgz", - "integrity": "sha512-qQB9h7KbibJzrDpkXkYvsmiDJK14FULCCZgEcoe2AvFAS64oCirWTwzTlAYEbKaRxWs5TFesE1Na6izMv3HfGg==", + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.42.1.tgz", + "integrity": "sha512-Gq9rmS54mjBL/7/MvBaNOBwbfnh7beHvS6oS4srqXFcQHpQCV1+c8JXWE8VLPyRDhgS3H8x8A7hztqI9VnwrAQ==", "dev": true, "dependencies": { - "playwright": "1.41.2" + "playwright": "1.42.1" }, "bin": { "playwright": "cli.js" @@ -1505,40 +1772,6 @@ "node": ">=16" } }, - "node_modules/@popperjs/core": { - "version": "2.11.8", - "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", - "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/popperjs" - } - }, - "node_modules/@preact/signals-core": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@preact/signals-core/-/signals-core-1.5.1.tgz", - "integrity": "sha512-dE6f+WCX5ZUDwXzUIWNMhhglmuLpqJhuy3X3xHrhZYI0Hm2LyQwOu0l9mdPiWrVNsE+Q7txOnJPgtIqHCYoBVA==", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/preact" - } - }, - "node_modules/@preact/signals-react": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/@preact/signals-react/-/signals-react-2.0.0.tgz", - "integrity": "sha512-tMVi2SXFXlojaiPNWa8dlYaidR/XvEgMSp+iymKJgMssBM/QVtUQrodKZek1BJju+dkVHiyeuQHmkuLOI9oyNw==", - "dependencies": { - "@preact/signals-core": "^1.5.1", - "use-sync-external-store": "^1.2.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/preact" - }, - "peerDependencies": { - "react": "^16.14.0 || 17.x || 18.x" - } - }, "node_modules/@radix-ui/number": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.0.1.tgz", @@ -2224,9 +2457,9 @@ } }, "node_modules/@radix-ui/react-select": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-1.2.2.tgz", - "integrity": "sha512-zI7McXr8fNaSrUY9mZe4x/HC0jTLY9fWNhO1oLWYMQGDXuV4UCivIGTxwioSzO0ZCYX9iSLyWmAh/1TOmX3Cnw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.0.0.tgz", + "integrity": "sha512-RH5b7af4oHtkcHS7pG6Sgv5rk5Wxa7XI8W5gvB1N/yiuDGZxko1ynvOiVhFM7Cis2A8zxF9bTOUVbRDzPepe6w==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/number": "1.0.1", @@ -2235,12 +2468,12 @@ "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", "@radix-ui/react-use-callback-ref": "1.0.1", @@ -2266,113 +2499,6 @@ } } }, - "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.0.4", - 
"resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz", - "integrity": "sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/primitive": "1.0.1", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-escape-keydown": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-focus-scope": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz", - "integrity": "sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-popper": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz", - "integrity": "sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@floating-ui/react-dom": "^2.0.0", - "@radix-ui/react-arrow": 
"1.0.3", - "@radix-ui/react-compose-refs": "1.0.1", - "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-primitive": "1.0.3", - "@radix-ui/react-use-callback-ref": "1.0.1", - "@radix-ui/react-use-layout-effect": "1.0.1", - "@radix-ui/react-use-rect": "1.0.1", - "@radix-ui/react-use-size": "1.0.1", - "@radix-ui/rect": "1.0.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-portal": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz", - "integrity": "sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA==", - "dependencies": { - "@babel/runtime": "^7.13.10", - "@radix-ui/react-primitive": "1.0.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, "node_modules/@radix-ui/react-separator": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.0.3.tgz", @@ -2662,11 +2788,11 @@ } }, "node_modules/@reactflow/background": { - "version": "11.3.8", - "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.3.8.tgz", - "integrity": "sha512-U4aI54F7PwqgYI0Knv72QFRI/wXeipPmIYAlDsA0j51+tlPxs3Nk2z7G1/4pC11GxEZkgQVfcIXro4l1Fk+bIQ==", + "version": "11.3.9", + "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.3.9.tgz", + "integrity": "sha512-byj/G9pEC8tN0wT/ptcl/LkEP/BBfa33/SvBkqE4XwyofckqF87lKp573qGlisfnsijwAbpDlf81PuFL41So4Q==", 
"dependencies": { - "@reactflow/core": "11.10.3", + "@reactflow/core": "11.10.4", "classcat": "^5.0.3", "zustand": "^4.4.1" }, @@ -2676,11 +2802,11 @@ } }, "node_modules/@reactflow/controls": { - "version": "11.2.8", - "resolved": "https://registry.npmjs.org/@reactflow/controls/-/controls-11.2.8.tgz", - "integrity": "sha512-Y9YVx38sRjYtbPsI/xa+B1FGN73FV1GqqajlmGfrc1TmqhJaX+gaMXMvVazT/N5haK1hMJvOApUTLQ2V/5Rdbg==", + "version": "11.2.9", + "resolved": "https://registry.npmjs.org/@reactflow/controls/-/controls-11.2.9.tgz", + "integrity": "sha512-e8nWplbYfOn83KN1BrxTXS17+enLyFnjZPbyDgHSRLtI5ZGPKF/8iRXV+VXb2LFVzlu4Wh3la/pkxtfP/0aguA==", "dependencies": { - "@reactflow/core": "11.10.3", + "@reactflow/core": "11.10.4", "classcat": "^5.0.3", "zustand": "^4.4.1" }, @@ -2690,9 +2816,9 @@ } }, "node_modules/@reactflow/core": { - "version": "11.10.3", - "resolved": "https://registry.npmjs.org/@reactflow/core/-/core-11.10.3.tgz", - "integrity": "sha512-nV3nep4fjBy3h8mYSnJcclGcQtj8fkUBmNkEwCZCK4ps+n3HNkXFB0BRisSnQz6GRQlYboSsi0cThEl3KdNITw==", + "version": "11.10.4", + "resolved": "https://registry.npmjs.org/@reactflow/core/-/core-11.10.4.tgz", + "integrity": "sha512-j3i9b2fsTX/sBbOm+RmNzYEFWbNx4jGWGuGooh2r1jQaE2eV+TLJgiG/VNOp0q5mBl9f6g1IXs3Gm86S9JfcGw==", "dependencies": { "@types/d3": "^7.4.0", "@types/d3-drag": "^3.0.1", @@ -2710,11 +2836,11 @@ } }, "node_modules/@reactflow/minimap": { - "version": "11.7.8", - "resolved": "https://registry.npmjs.org/@reactflow/minimap/-/minimap-11.7.8.tgz", - "integrity": "sha512-MwyP5q3VomC91Dhd4P6YcxEfnjDbREGYV6rRxbSJSTHiG0x7ETQCcPelYDGy7JvQej77Pa2yJ4g0FDwP7CsQEA==", + "version": "11.7.9", + "resolved": "https://registry.npmjs.org/@reactflow/minimap/-/minimap-11.7.9.tgz", + "integrity": "sha512-le95jyTtt3TEtJ1qa7tZ5hyM4S7gaEQkW43cixcMOZLu33VAdc2aCpJg/fXcRrrf7moN2Mbl9WIMNXUKsp5ILA==", "dependencies": { - "@reactflow/core": "11.10.3", + "@reactflow/core": "11.10.4", "@types/d3-selection": "^3.0.3", "@types/d3-zoom": "^3.0.1", "classcat": 
"^5.0.3", @@ -2728,11 +2854,11 @@ } }, "node_modules/@reactflow/node-resizer": { - "version": "2.2.8", - "resolved": "https://registry.npmjs.org/@reactflow/node-resizer/-/node-resizer-2.2.8.tgz", - "integrity": "sha512-u/EXLpvOfAmq1sGoPYwoX4gi0PnCi0mH3eHVClHNc8JQD0WCqcV1UeVV7H3PF+1SGhhg/aOv/vPG1PcQ5fu4jQ==", + "version": "2.2.9", + "resolved": "https://registry.npmjs.org/@reactflow/node-resizer/-/node-resizer-2.2.9.tgz", + "integrity": "sha512-HfickMm0hPDIHt9qH997nLdgLt0kayQyslKE0RS/GZvZ4UMQJlx/NRRyj5y47Qyg0NnC66KYOQWDM9LLzRTnUg==", "dependencies": { - "@reactflow/core": "11.10.3", + "@reactflow/core": "11.10.4", "classcat": "^5.0.4", "d3-drag": "^3.0.0", "d3-selection": "^3.0.0", @@ -2744,11 +2870,11 @@ } }, "node_modules/@reactflow/node-toolbar": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/@reactflow/node-toolbar/-/node-toolbar-1.3.8.tgz", - "integrity": "sha512-cfvlTPeJa/ciQTosx2bGrjHT8K/UL9kznpvpOzeZFnJm5UQXmbwAYt4Vo6GfkD51mORcIL7ujQJvB9ym3ZI9KA==", + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/@reactflow/node-toolbar/-/node-toolbar-1.3.9.tgz", + "integrity": "sha512-VmgxKmToax4sX1biZ9LXA7cj/TBJ+E5cklLGwquCCVVxh+lxpZGTBF3a5FJGVHiUNBBtFsC8ldcSZIK4cAlQww==", "dependencies": { - "@reactflow/core": "11.10.3", + "@reactflow/core": "11.10.4", "classcat": "^5.0.3", "zustand": "^4.4.1" }, @@ -2758,9 +2884,9 @@ } }, "node_modules/@remix-run/router": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.15.0.tgz", - "integrity": "sha512-HOil5aFtme37dVQTB6M34G95kPM3MMuqSmIRVCC52eKV+Y/tGSqw9P3rWhlAx6A+mz+MoX+XxsGsNJbaI5qCgQ==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.15.3.tgz", + "integrity": "sha512-Oy8rmScVrVxWZVOpEF57ovlnhpZ8CCPlnIIumVcV9nFdiSIrus99+Lw78ekXyGvVDlIsFJbSfmSovJUhCWYV3w==", "engines": { "node": ">=14.0.0" } @@ -2962,31 +3088,6 @@ "url": "https://github.com/sponsors/gregberge" } }, - 
"node_modules/@svgr/core/node_modules/cosmiconfig": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", - "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", - "dependencies": { - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0", - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/d-fischer" - }, - "peerDependencies": { - "typescript": ">=4.9.5" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, "node_modules/@svgr/hast-util-to-babel-ast": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz", @@ -3067,23 +3168,47 @@ } } }, - "node_modules/@swc/cli/node_modules/source-map": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", - "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "node_modules/@swc/cli/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, "engines": { - "node": ">= 8" + "node": ">=10" } }, + "node_modules/@swc/cli/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@swc/cli/node_modules/yallist": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/@swc/core": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.0.tgz", - "integrity": "sha512-wc5DMI5BJftnK0Fyx9SNJKkA0+BZSJQx8430yutWmsILkHMBD3Yd9GhlMaxasab9RhgKqZp7Ht30hUYO5ZDvQg==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.11.tgz", + "integrity": "sha512-WKEakMZxkVwRdgMN4AMJ9K5nysY8g8npgQPczmjBeNK5In7QEAZAJwnyccrWwJZU0XjVeHn2uj+XbOKdDW17rg==", "dev": true, "hasInstallScript": true, "dependencies": { - "@swc/counter": "^0.1.1", + "@swc/counter": "^0.1.2", "@swc/types": "^0.1.5" }, "engines": { @@ -3094,16 +3219,16 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.4.0", - "@swc/core-darwin-x64": "1.4.0", - "@swc/core-linux-arm-gnueabihf": "1.4.0", - "@swc/core-linux-arm64-gnu": "1.4.0", - "@swc/core-linux-arm64-musl": "1.4.0", - "@swc/core-linux-x64-gnu": "1.4.0", - "@swc/core-linux-x64-musl": "1.4.0", - "@swc/core-win32-arm64-msvc": "1.4.0", - "@swc/core-win32-ia32-msvc": "1.4.0", - "@swc/core-win32-x64-msvc": "1.4.0" + "@swc/core-darwin-arm64": "1.4.11", + "@swc/core-darwin-x64": "1.4.11", + "@swc/core-linux-arm-gnueabihf": "1.4.11", + "@swc/core-linux-arm64-gnu": "1.4.11", + "@swc/core-linux-arm64-musl": "1.4.11", + "@swc/core-linux-x64-gnu": "1.4.11", + "@swc/core-linux-x64-musl": "1.4.11", + "@swc/core-win32-arm64-msvc": "1.4.11", + "@swc/core-win32-ia32-msvc": "1.4.11", + "@swc/core-win32-x64-msvc": "1.4.11" }, "peerDependencies": { "@swc/helpers": "^0.5.0" @@ -3115,9 +3240,9 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.0.tgz", - "integrity": 
"sha512-UTJ/Vz+s7Pagef6HmufWt6Rs0aUu+EJF4Pzuwvr7JQQ5b1DZeAAUeUtkUTFx/PvCbM8Xfw4XdKBUZfrIKCfW8A==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.11.tgz", + "integrity": "sha512-C1j1Qp/IHSelVWdEnT7f0iONWxQz6FAqzjCF2iaL+0vFg4V5f2nlgrueY8vj5pNNzSGhrAlxsMxEIp4dj1MXkg==", "cpu": [ "arm64" ], @@ -3131,9 +3256,9 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.0.tgz", - "integrity": "sha512-f8v58u2GsGak8EtZFN9guXqE0Ep10Suny6xriaW2d8FGqESPyNrnBzli3aqkSeQk5gGqu2zJ7WiiKp3XoUOidA==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.11.tgz", + "integrity": "sha512-0TTy3Ni8ncgaMCchSQ7FK8ZXQLlamy0FXmGWbR58c+pVZWYZltYPTmheJUvVcR0H2+gPAymRKyfC0iLszDALjg==", "cpu": [ "x64" ], @@ -3147,9 +3272,9 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.0.tgz", - "integrity": "sha512-q2KAkBzmPcTnRij/Y1fgHCKAGevUX/H4uUESrw1J5gmUg9Qip6onKV80lTumA1/aooGJ18LOsB31qdbwmZk9OA==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.11.tgz", + "integrity": "sha512-XJLB71uw0rog4DjYAPxFGAuGCBQpgJDlPZZK6MTmZOvI/1t0+DelJ24IjHIxk500YYM26Yv47xPabqFPD7I2zQ==", "cpu": [ "arm" ], @@ -3163,9 +3288,9 @@ } }, "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.0.tgz", - "integrity": "sha512-SknGu96W0mzHtLHWm+62fk5+Omp9fMPFO7AWyGFmz2tr8EgRRXtTSrBUnWhAbgcalnhen48GsvtMdxf1KNputg==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.11.tgz", + "integrity": 
"sha512-vYQwzJvm/iu052d5Iw27UFALIN5xSrGkPZXxLNMHPySVko2QMNNBv35HLatkEQHbQ3X+VKSW9J9SkdtAvAVRAQ==", "cpu": [ "arm64" ], @@ -3179,9 +3304,9 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.0.tgz", - "integrity": "sha512-/k3TDvpBRMDNskHooNN1KqwUhcwkfBlIYxRTnJvsfT2C7My4pffR+4KXmt0IKynlTTbCdlU/4jgX4801FSuliw==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.11.tgz", + "integrity": "sha512-eV+KduiRYUFjPsvbZuJ9aknQH9Tj0U2/G9oIZSzLx/18WsYi+upzHbgxmIIHJ2VJgfd7nN40RI/hMtxNsUzR/g==", "cpu": [ "arm64" ], @@ -3195,9 +3320,9 @@ } }, "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.0.tgz", - "integrity": "sha512-GYsTMvNt5+WTVlwwQzOOWsPMw6P/F41u5PGHWmfev8Nd4QJ1h3rWPySKk4mV42IJwH9MgQCVSl3ygwNqwl6kFg==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.11.tgz", + "integrity": "sha512-WA1iGXZ2HpqM1OR9VCQZJ8sQ1KP2or9O4bO8vWZo6HZJIeoQSo7aa9waaCLRpkZvkng1ct/TF/l6ymqSNFXIzQ==", "cpu": [ "x64" ], @@ -3211,9 +3336,9 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.0.tgz", - "integrity": "sha512-jGVPdM/VwF7kK/uYRW5N6FwzKf/FnDjGIR3RPvQokjYJy7Auk+3Oj21C0Jev7sIT9RYnO/TrFEoEozKeD/z2Qw==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.11.tgz", + "integrity": "sha512-UkVJToKf0owwQYRnGvjHAeYVDfeimCEcx0VQSbJoN7Iy0ckRZi7YPlmWJU31xtKvikE2bQWCOVe0qbSDqqcWXA==", "cpu": [ "x64" ], @@ -3227,9 +3352,9 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.0.tgz", - "integrity": "sha512-biHYm1AronEKlt47O/H8sSOBM2BKXMmWT+ApvlxUw50m1RGNnVnE0bgY7tylFuuSiWyXsQPJbmUV708JqORXVg==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.11.tgz", + "integrity": "sha512-35khwkyly7lF5NDSyvIrukBMzxPorgc5iTSDfVO/LvnmN5+fm4lTlrDr4tUfTdOhv3Emy7CsKlsNAeFRJ+Pm+w==", "cpu": [ "arm64" ], @@ -3243,9 +3368,9 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.0.tgz", - "integrity": "sha512-TL5L2tFQb19kJwv6+elToGBj74QXCn9j+hZfwQatvZEJRA5rDK16eH6oAE751dGUArhnWlW3Vj65hViPvTuycw==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.11.tgz", + "integrity": "sha512-Wx8/6f0ufgQF2pbVPsJ2dAmFLwIOW+xBE5fxnb7VnEbGkTgP1qMDWiiAtD9rtvDSuODG3i1AEmAak/2HAc6i6A==", "cpu": [ "ia32" ], @@ -3259,9 +3384,9 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.0.tgz", - "integrity": "sha512-e2xVezU7XZ2Stzn4i7TOQe2Kn84oYdG0M3A7XI7oTdcpsKCcKwgiMoroiAhqCv+iN20KNqhnWwJiUiTj/qN5AA==", + "version": "1.4.11", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.11.tgz", + "integrity": "sha512-0xRFW6K9UZQH2NVC/0pVB0GJXS45lY24f+6XaPBF1YnMHd8A8GoHl7ugyM5yNUTe2AKhSgk5fJV00EJt/XBtdQ==", "cpu": [ "x64" ], @@ -3281,10 +3406,13 @@ "dev": true }, "node_modules/@swc/types": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", - "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", - "dev": true + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.6.tgz", + 
"integrity": "sha512-/JLo/l2JsT/LRd80C3HfbmVpxOAJ11FO2RCEslFrgzLltoP9j8XIbsyDcfCt2WWyX+CM96rBoNM+IToAkFOugg==", + "dev": true, + "dependencies": { + "@swc/counter": "^0.1.3" + } }, "node_modules/@szmarczak/http-timer": { "version": "4.0.6", @@ -3343,9 +3471,9 @@ } }, "node_modules/@tailwindcss/typography": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.10.tgz", - "integrity": "sha512-Pe8BuPJQJd3FfRnm6H0ulKIGoMEQS+Vq01R6M5aCrFB/ccR/shT+0kXLjouGC1gFLm9hopTFN+DMP0pfwRWzPw==", + "version": "0.5.12", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.12.tgz", + "integrity": "sha512-CNwpBpconcP7ppxmuq3qvaCxiRWnbhANpY/ruH4L5qs2GCiVDJXde/pjj2HWPV1+Q4G9+V/etrwUYopdcjAlyg==", "dev": true, "dependencies": { "lodash.castarray": "^4.4.0", @@ -3358,11 +3486,11 @@ } }, "node_modules/@tanstack/react-virtual": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.0.4.tgz", - "integrity": "sha512-tiqKW/e2MJVCr7/pRUXulpkyxllaOclkHNfhKTo4pmHjJIqnhMfwIjc1Q1R0Un3PI3kQywywu/791c8z9u0qeA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.2.0.tgz", + "integrity": "sha512-OEdMByf2hEfDa6XDbGlZN8qO6bTjlNKqjM3im9JG+u3mCL8jALy0T/67oDI001raUUPh1Bdmfn4ZvPOV5knpcg==", "dependencies": { - "@tanstack/virtual-core": "3.0.0" + "@tanstack/virtual-core": "3.2.0" }, "funding": { "type": "github", @@ -3374,9 +3502,9 @@ } }, "node_modules/@tanstack/virtual-core": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.0.0.tgz", - "integrity": "sha512-SYXOBTjJb05rXa2vl55TTwO40A6wKu0R5i1qQwhJYNDIqaIGF7D0HsLw+pJAyi2OvntlEIVusx3xtbbgSUi6zg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.2.0.tgz", + "integrity": 
"sha512-P5XgYoAw/vfW65byBbJQCw+cagdXDT/qH6wmABiLt4v4YBT2q2vqCOhihe+D1Nt325F/S/0Tkv6C5z0Lv+VBQQ==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -3402,6 +3530,82 @@ "node": ">=14" } }, + "node_modules/@testing-library/dom/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "peer": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@testing-library/dom/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "peer": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@testing-library/dom/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "peer": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@testing-library/dom/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, + "peer": true + }, + "node_modules/@testing-library/dom/node_modules/has-flag": { + "version": 
"4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/dom/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@testing-library/jest-dom": { "version": "5.17.0", "resolved": "https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-5.17.0.tgz", @@ -3424,6 +3628,21 @@ "yarn": ">=1" } }, + "node_modules/@testing-library/jest-dom/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, "node_modules/@testing-library/jest-dom/node_modules/chalk": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz", @@ -3437,6 +3656,45 @@ "node": ">=8" } }, + "node_modules/@testing-library/jest-dom/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + 
"node_modules/@testing-library/jest-dom/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@testing-library/jest-dom/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/jest-dom/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@testing-library/react": { "version": "13.4.0", "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-13.4.0.tgz", @@ -3474,6 +3732,76 @@ "node": ">=12" } }, + "node_modules/@testing-library/react/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@testing-library/react/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + 
"ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@testing-library/react/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@testing-library/react/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/@testing-library/react/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@testing-library/react/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/@testing-library/user-event": { "version": "13.5.0", "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-13.5.0.tgz", @@ -3693,9 +4021,9 @@ } }, "node_modules/@types/d3-hierarchy": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.6.tgz", - "integrity": 
"sha512-qlmD/8aMk5xGorUvTUWHCiumvgaUXYldYjNVOWtYoTYY/L+WwIEAmJxUmTgr9LoGNG0PPAOmqMDJVDPc7DOpPw==" + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", + "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==" }, "node_modules/@types/d3-interpolate": { "version": "3.0.4", @@ -3706,9 +4034,9 @@ } }, "node_modules/@types/d3-path": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.0.2.tgz", - "integrity": "sha512-WAIEVlOCdd/NKRYTsqCpOMHQHemKBEINf8YXMYOtXH0GA7SY0dqMB78P3Uhgfy+4X+/Mlw2wDtlETkN6kQUCMA==" + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-P2dlU/q51fkOc/Gfl3Ul9kicV7l+ra934qBFXCFhrZMOL6du1TM0pm1ThYvENukyOn5h9v+yMJ9Fn5JK4QozrQ==" }, "node_modules/@types/d3-polygon": { "version": "3.0.2", @@ -3849,9 +4177,9 @@ } }, "node_modules/@types/lodash": { - "version": "4.14.202", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.202.tgz", - "integrity": "sha512-OvlIYQK9tNneDlS0VN54LLd5uiPCBOp7gS5Z0f1mjoJYBrtStzgmJBxONW3U6OZqdtNzZPmn9BS/7WI7BFFcFQ==", + "version": "4.17.0", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.0.tgz", + "integrity": "sha512-t7dhREVv6dbNj0q17X12j7yDG4bD/DHYX7o5/DbDxobP0HnGPgpRz2Ej77aL7TZT3DSw13fqUTj8J4mMnqa7WA==", "dev": true }, "node_modules/@types/mathjax": { @@ -3873,48 +4201,34 @@ "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==" }, "node_modules/@types/node": { - "version": "16.18.79", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.79.tgz", - "integrity": "sha512-Qd7jdLR5zmnIyMhfDrfPqN5tUCvreVpP3Qrf2oSM+F7SNzlb/MwHISGUkdFHtevfkPJ3iAGyeQI/jsbh9EStgQ==", + "version": "16.18.93", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.93.tgz", + "integrity": 
"sha512-epWuohp6c0bQt0j3RYCiP9x52axHVn+CjS1Rx1VjPwF+ySg8lrigH3yXGs88XqnA+jGM2qnSMuFTsBxft+hO1Q==", "devOptional": true }, - "node_modules/@types/parse-json": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.2.tgz", - "integrity": "sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==" - }, "node_modules/@types/prop-types": { - "version": "15.7.11", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz", - "integrity": "sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng==" + "version": "15.7.12", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz", + "integrity": "sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q==" }, "node_modules/@types/react": { - "version": "18.2.55", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.55.tgz", - "integrity": "sha512-Y2Tz5P4yz23brwm2d7jNon39qoAtMMmalOQv6+fEFt1mT+FcM3D841wDpoUvFXhaYenuROCy3FZYqdTjM7qVyA==", + "version": "18.2.73", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.73.tgz", + "integrity": "sha512-XcGdod0Jjv84HOC7N5ziY3x+qL0AfmubvKOZ9hJjJ2yd5EE+KYjWhdOjt387e9HPheHkdggF9atTifMRtyAaRA==", "dependencies": { "@types/prop-types": "*", - "@types/scheduler": "*", "csstype": "^3.0.2" } }, "node_modules/@types/react-dom": { - "version": "18.2.18", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.18.tgz", - "integrity": "sha512-TJxDm6OfAX2KJWJdMEVTwWke5Sc/E/RlnPGvGfS0W7+6ocy2xhDVQVh/KvC2Uf7kACs+gDytdusDSdWfWkaNzw==", + "version": "18.2.23", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.23.tgz", + "integrity": "sha512-ZQ71wgGOTmDYpnav2knkjr3qXdAFu0vsk8Ci5w3pGAIdj7/kKAyn+VsQDhXsmzzzepAiI9leWMmubXz690AI/A==", "devOptional": true, "dependencies": { "@types/react": "*" } }, - 
"node_modules/@types/react-transition-group": { - "version": "4.4.10", - "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.10.tgz", - "integrity": "sha512-hT/+s0VQs2ojCX823m60m5f0sL5idt9SO6Tj6Dg+rdphGPIeJbJ6CxvBYkgkGKrYeDjvIpKTR38UzmtHJOGW3Q==", - "dependencies": { - "@types/react": "*" - } - }, "node_modules/@types/responselike": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", @@ -3924,11 +4238,6 @@ "@types/node": "*" } }, - "node_modules/@types/scheduler": { - "version": "0.16.8", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.8.tgz", - "integrity": "sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==" - }, "node_modules/@types/testing-library__jest-dom": { "version": "5.14.9", "resolved": "https://registry.npmjs.org/@types/testing-library__jest-dom/-/testing-library__jest-dom-5.14.9.tgz", @@ -3949,6 +4258,12 @@ "integrity": "sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==", "dev": true }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", + "dev": true + }, "node_modules/@vitejs/plugin-react-swc": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.6.0.tgz", @@ -3967,15 +4282,10 @@ "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==", "deprecated": "Use your platform's native atob() and btoa() methods instead" }, - "node_modules/accordion": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/accordion/-/accordion-3.0.2.tgz", - "integrity": 
"sha512-jbQfFaw+57OBwPt7qSNHuW+RA8smmRwkWRS1Ozh6K/QxUspBgBV/LpdSzlY7vee8TomS6j3D33B9rIeH1qMwsA==" - }, "node_modules/ace-builds": { - "version": "1.32.5", - "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.32.5.tgz", - "integrity": "sha512-Mrh+qTitOomv1kANaRHO6jpxq6j8BzN9Duwb6pmpuB2sAsfv7MHH9j3LpRHS388uid2GisauC682bemmkdNXKg==" + "version": "1.32.9", + "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.32.9.tgz", + "integrity": "sha512-dqBLPj//Gq0b92YUtRIsdWsORf4J+4xW3r8/4Wr2Vqid7O1j7YBV/ZsVvWBjZFy+EnvMCRFCFOEIM1cbt4BQ/g==" }, "node_modules/acorn": { "version": "8.11.3", @@ -3997,6 +4307,15 @@ "acorn-walk": "^8.0.2" } }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, "node_modules/acorn-walk": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz", @@ -4005,11 +4324,6 @@ "node": ">=0.4.0" } }, - "node_modules/add": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/add/-/add-2.0.6.tgz", - "integrity": "sha512-j5QzrmsokwWWp6kUcJQySpbG+xfOBqqKnup3OIk1pz+kB/80SLorZ9V8zHFLO92Lcd+hbvq8bT+zOGoPkmBV0Q==" - }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -4022,13 +4336,14 @@ } }, "node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, 
"dependencies": { "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" }, "funding": { @@ -4045,17 +4360,14 @@ } }, "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", "dependencies": { - "color-convert": "^2.0.1" + "color-convert": "^1.9.0" }, "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" + "node": ">=4" } }, "node_modules/ansi-to-html": { @@ -4120,9 +4432,9 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, "node_modules/aria-hidden": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.3.tgz", - "integrity": "sha512-xcLxITLe2HYa1cnYnwCjkOO1PqUHQpozB8x9AR0OgWN2woOBi5kSDVxKfd0b7sb1hw5qFeJhXm9H1nu3xSfLeQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.4.tgz", + "integrity": "sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==", "dependencies": { "tslib": "^2.0.0" }, @@ -4155,23 +4467,15 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "engines": { - "node": ">=8" - } - }, "node_modules/asynckit": { "version": 
"0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/autoprefixer": { - "version": "10.4.17", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.17.tgz", - "integrity": "sha512-/cpVNRLSfhOtcGflT13P2794gVSgmPgTR+erw5ifnMLZb0UnSlkK4tquLmkd3BhA+nLo5tX8Cu0upUsGKvKbmg==", + "version": "10.4.19", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.19.tgz", + "integrity": "sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==", "dev": true, "funding": [ { @@ -4188,8 +4492,8 @@ } ], "dependencies": { - "browserslist": "^4.22.2", - "caniuse-lite": "^1.0.30001578", + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001599", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", @@ -4206,10 +4510,13 @@ } }, "node_modules/available-typed-arrays": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.6.tgz", - "integrity": "sha512-j1QzY8iPNPG4o4xmO3ptzpRxTciqD3MgEHtifP/YnJpIo58Xu+ne4BejlbkuaLfXn/nz6HFiw29bLpj2PNMdGg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -4218,29 +4525,15 @@ } }, "node_modules/axios": { - "version": "1.6.7", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz", - "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==", + "version": "1.6.8", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz", + "integrity": 
"sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==", "dependencies": { - "follow-redirects": "^1.15.4", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, - "node_modules/babel-plugin-macros": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", - "integrity": "sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==", - "dependencies": { - "@babel/runtime": "^7.12.5", - "cosmiconfig": "^7.0.0", - "resolve": "^1.19.0" - }, - "engines": { - "node": ">=10", - "npm": ">=6" - } - }, "node_modules/bail": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", @@ -4320,20 +4613,39 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/bin-version/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "node_modules/bin-version-check/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" + "yallist": "^4.0.0" }, "engines": { - "node": ">= 8" + "node": ">=10" } }, + "node_modules/bin-version-check/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + 
}, + "node_modules/bin-version-check/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/bin-version/node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -4393,57 +4705,15 @@ "node": ">=8" } }, - "node_modules/bin-version/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/bin-version/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/bin-version/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/bin-version/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/binary-extensions": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/bl": { @@ -4476,9 +4746,9 @@ } }, "node_modules/browserslist": { - "version": "4.22.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.22.3.tgz", - "integrity": "sha512-UAp55yfwNv0klWNapjs/ktHoguxuQNGnOzxYmfnXIS+8AsRDZkSDxg7R1AX3GKzn078SBI5dzwzj/Yx0Or0e3A==", + "version": "4.23.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", + "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", "funding": [ { "type": "opencollective", @@ -4494,8 +4764,8 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001580", - "electron-to-chromium": "^1.4.648", + "caniuse-lite": "^1.0.30001587", + "electron-to-chromium": "^1.4.668", "node-releases": "^2.0.14", "update-browserslist-db": "^1.0.13" }, @@ -4572,15 +4842,16 @@ } }, "node_modules/call-bind": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.6.tgz", - "integrity": "sha512-Mj50FLHtlsoVfRfnHaZvyrooHcrlceNZdL/QBvJJVd9Ta55qCQK0gs4ss2oZDeV9zFCs6ewzYgVE5yfVmfFpVg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "dependencies": { + "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.3", - "set-function-length": 
"^1.2.0" + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -4617,9 +4888,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001584", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001584.tgz", - "integrity": "sha512-LOz7CCQ9M1G7OjJOF9/mzmqmj3jE/7VOmrfw6Mgs0E8cjOsbRXQJHsPBfmBOXDskXKrHLyyW3n7kpDW/4BsfpQ==", + "version": "1.0.30001603", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001603.tgz", + "integrity": "sha512-iL2iSS0eDILMb9n5yKQoTBim9jMZ0Yrk8g0N9K7UzYyWnfIKzXBZD5ngpM37ZcL/cv0Mli8XtVMRYMQAfFpi5Q==", "funding": [ { "type": "opencollective", @@ -4645,19 +4916,16 @@ } }, "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" + "node": ">=4" } }, "node_modules/character-entities": { @@ -4688,15 +4956,9 @@ } }, "node_modules/chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": 
"sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -4709,10 +4971,24 @@ "engines": { "node": ">= 8.10.0" }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, "optionalDependencies": { "fsevents": "~2.3.2" } }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/class-variance-authority": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.6.1.tgz", @@ -4729,11 +5005,6 @@ "resolved": "https://registry.npmjs.org/classcat/-/classcat-5.0.4.tgz", "integrity": "sha512-sbpkOw6z413p+HDGcBENe498WM9woqWHiJxCq7nvmxe9WmrUmqfAcxpIwAiMtM5Q3AhYkzXcNQHqsWq0mND51g==" }, - "node_modules/classnames": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", - "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" - }, "node_modules/cli-cursor": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", @@ -4792,26 +5063,36 @@ "node": ">=6" } }, + "node_modules/cmdk": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/cmdk/-/cmdk-1.0.0.tgz", + "integrity": "sha512-gDzVf0a09TvoJ5jnuPvygTB77+XdOSwEmJ88L6XPFPlv7T3RxbP9jgenfylrAMD0+Le1aO0nVjQUzl2g+vjz5Q==", + "dependencies": { + "@radix-ui/react-dialog": "1.0.5", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "react": "^18.0.0", + "react-dom": "^18.0.0" + } + }, "node_modules/code-block-writer": { "version": "12.0.0", "resolved": 
"https://registry.npmjs.org/code-block-writer/-/code-block-writer-12.0.0.tgz", "integrity": "sha512-q4dMFMlXtKR3XNBHyMHt/3pwYNA69EDk00lloMOaaUMKPUXBw6lpXtbu3MMVG6/uOihGnRDOlkyqsONEUj60+w==" }, "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" + "color-name": "1.1.3" } }, "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" }, "node_modules/combined-stream": { "version": "1.0.8", @@ -4842,6 +5123,12 @@ "node": ">= 10" } }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -4855,9 +5142,9 @@ } }, "node_modules/convert-source-map": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", - "integrity": 
"sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==" + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" }, "node_modules/cookie": { "version": "0.4.2", @@ -4868,29 +5155,41 @@ } }, "node_modules/cosmiconfig": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.1.0.tgz", - "integrity": "sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==", + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "dependencies": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.2.1", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.10.0" + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0", + "path-type": "^4.0.0" }, "engines": { - "node": ">=10" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, "node_modules/cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", - "dev": true, + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dependencies": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + 
"which": "^2.0.1" + }, + "engines": { + "node": ">= 8" } }, "node_modules/css-selector-tokenizer": { @@ -5052,9 +5351,9 @@ } }, "node_modules/daisyui": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-4.6.1.tgz", - "integrity": "sha512-IXI8ypN/hkl1AKsag1XPlWt0wfvL4NedTUtUkv/VFP5q/xDbBZrZthq3/9M2yU1egcbbLhp01rluIz0GICUc+g==", + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-4.9.0.tgz", + "integrity": "sha512-9JsDx4E+30kPxThE+6yEwQokqg1957uwTx/skP2RE98fG6Ten6U+S9YXeQg1a3CI958aF5aOb0oEA+KZFfrZUA==", "dev": true, "dependencies": { "css-selector-tokenizer": "^0.8", @@ -5091,6 +5390,29 @@ "node": ">=12" } }, + "node_modules/data-urls/node_modules/tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/data-urls/node_modules/whatwg-url": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "dependencies": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -5192,6 +5514,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, "node_modules/defaults": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", @@ -5213,18 +5541,20 @@ } }, 
"node_modules/define-data-property": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.2.tgz", - "integrity": "sha512-SRtsSqsDbgpJBbW3pABMCOt6rQyeM8s8RiyeSN8jYG8sYmt/kGJejbydttUsnDs1tadr19tvhT4ShwMyoqAm4g==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "dependencies": { + "es-define-property": "^1.0.0", "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.2", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/define-properties": { @@ -5271,9 +5601,9 @@ "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" }, "node_modules/diff": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz", - "integrity": "sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", "engines": { "node": ">=0.3.1" } @@ -5297,21 +5627,24 @@ "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, 
"node_modules/dom-accessibility-api": { "version": "0.5.16", "resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz", "integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==", "dev": true }, - "node_modules/dom-helpers": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", - "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", - "dependencies": { - "@babel/runtime": "^7.8.7", - "csstype": "^3.0.2" - } - }, "node_modules/domexception": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", @@ -5325,9 +5658,9 @@ } }, "node_modules/dompurify": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.8.tgz", - "integrity": "sha512-b7uwreMYL2eZhrSCRC4ahLTeZcPZxSmYfmcQGXGkXiZSNW1X85v+SDM5KsWcpivIiUBH47Ji7NtyUdpLeF5JZQ==" + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.11.tgz", + "integrity": "sha512-Fan4uMuyB26gFV3ovPoEoQbxRRPfTu3CvImyZnhGq5fsIEO+gEFLp45ISFt+kQBWsK5ulDdT0oV28jS1UrwQLg==" }, "node_modules/dot-case": { "version": "3.0.4", @@ -5344,14 +5677,14 @@ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" }, "node_modules/electron-to-chromium": { - "version": "1.4.657", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.657.tgz", - "integrity": "sha512-On2ymeleg6QbRuDk7wNgDdXtNqlJLM2w4Agx1D/RiTmItiL+a9oq5p7HUa2ZtkAtGBe/kil2dq/7rPfkbe0r5w==" + "version": "1.4.723", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.723.tgz", + "integrity": "sha512-rxFVtrMGMFROr4qqU6n95rUi9IlfIm+lIAt+hOToy/9r6CDv0XiEcQdC3VP71y1pE5CFTzKV0RvxOGYCPWWHPw==" }, "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" }, "node_modules/end-of-stream": { "version": "1.4.4", @@ -5378,6 +5711,18 @@ "is-arrayish": "^0.2.1" } }, + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-errors": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", @@ -5452,14 +5797,11 @@ } }, "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=0.8.0" } }, "node_modules/escodegen": { @@ -5491,6 +5833,293 @@ "node": ">=0.10.0" } }, + "node_modules/eslint": { + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "dev": true, + 
"dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "graphemer": "^1.4.0", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-plugin-es": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz", + "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==", + "dev": true, + "dependencies": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=4.19.1" + } + }, + "node_modules/eslint-plugin-node": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", + 
"integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==", + "dev": true, + "dependencies": { + "eslint-plugin-es": "^3.0.0", + "eslint-utils": "^2.0.0", + "ignore": "^5.1.1", + "minimatch": "^3.0.4", + "resolve": "^1.10.1", + "semver": "^6.1.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "peerDependencies": { + "eslint": ">=5.16.0" + } + }, + "node_modules/eslint-plugin-node/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint-plugin-node/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/eslint-scope": { + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": 
">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/eslint/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": 
"^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/eslint/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/globals": { + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/eslint/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/eslint/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/esm": { "version": "3.2.25", "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", @@ -5499,6 +6128,23 @@ "node": ">=6" } }, + "node_modules/espree": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", + "dev": true, + "dependencies": { + "acorn": "^8.9.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, "node_modules/esprima": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", @@ -5511,6 +6157,30 @@ "node": ">=4" } }, + "node_modules/esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, "node_modules/estraverse": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", @@ -5550,6 +6220,66 @@ "node": ">=4" } }, + "node_modules/execa/node_modules/cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "dev": true, + "dependencies": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "node_modules/execa/node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "node_modules/execa/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/execa/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/execa/node_modules/which": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/execa/node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, "node_modules/executable": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", @@ -5595,7 +6325,8 @@ "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true }, "node_modules/fast-glob": { "version": "3.3.2", @@ -5612,6 +6343,29 @@ "node": ">=8.6.0" } }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + 
"integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, "node_modules/fastparse": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz", @@ -5660,6 +6414,18 @@ "node": "^12.20 || >= 14.13" } }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, "node_modules/file-type": { "version": "17.1.6", "resolved": "https://registry.npmjs.org/file-type/-/file-type-17.1.6.tgz", @@ -5717,22 +6483,20 @@ "node": ">=8" } }, - "node_modules/find-root": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz", - "integrity": "sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng==" - }, "node_modules/find-up": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", - "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, "dependencies": { - "locate-path": "^5.0.0", + "locate-path": "^6.0.0", "path-exists": "^4.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/find-versions": { @@ -5750,10 +6514,30 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/flat-cache": { + "version": "3.2.0", + "resolved": 
"https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.3", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", + "dev": true + }, "node_modules/follow-redirects": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", - "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", "funding": [ { "type": "individual", @@ -5793,46 +6577,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/foreground-child/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/foreground-child/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-command": { - 
"version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/foreground-child/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" - } - }, "node_modules/foreground-child/node_modules/signal-exit": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", @@ -5844,20 +6588,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/foreground-child/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/form-data": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", @@ -5903,6 +6633,30 @@ "url": "https://github.com/sponsors/rawify" } }, + "node_modules/framer-motion": { + "version": "11.0.24", + "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.0.24.tgz", + "integrity": "sha512-l2iM8NR53qtcujgAqYvGPJJGModPNWEVUaATRDLfnaLvUoFpImovBm0AHalSSsY8tW6knP8mfJTW4WYGbnAe4w==", + "dependencies": { + "tslib": "^2.4.0" + }, + "peerDependencies": { + "@emotion/is-prop-valid": "*", + "react": "^18.0.0", + "react-dom": "^18.0.0" + }, + "peerDependenciesMeta": { + "@emotion/is-prop-valid": { + "optional": true + }, + "react": { + "optional": true + }, + 
"react-dom": { + "optional": true + } + } + }, "node_modules/fs-extra": { "version": "11.2.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", @@ -5916,6 +6670,12 @@ "node": ">=14.14" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, "node_modules/fsevents": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", @@ -5991,35 +6751,56 @@ } }, "node_modules/glob": { - "version": "10.3.10", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.10.tgz", - "integrity": "sha512-fa46+tv1Ak0UPK1TOy/pZrIybNNt4HCv7SDzwyfiOZkvZLEbjsZkJBPtDHVshZjbecAoAGSC20MjLDG/qr679g==", + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.3.5", - "minimatch": "^9.0.1", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", - "path-scurry": "^1.10.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "*" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": 
"sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dependencies": { - "is-glob": "^4.0.1" + "is-glob": "^4.0.3" }, "engines": { - "node": ">= 6" + "node": ">=10.13.0" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" } }, "node_modules/globals": { @@ -6072,6 +6853,12 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, "node_modules/has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -6082,30 +6869,29 @@ } }, "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": 
"sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", "engines": { - "node": ">=8" + "node": ">=4" } }, "node_modules/has-property-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.1.tgz", - "integrity": "sha512-VsX8eaIewvas0xnvinAe9bw4WfIeODpGYikiWYLH+dma0Jw6KHYqWiWfhQlgOVK8D6PvjubK5Uc4P0iIhIcNVg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "dev": true, "engines": { "node": ">= 0.4" @@ -6142,9 +6928,9 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dependencies": { "function-bind": "^1.1.2" }, @@ -6424,6 +7210,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, "node_modules/indent-string": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", @@ -6433,6 +7228,16 @@ "node": ">=8" } }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -6680,10 +7485,13 @@ } }, "node_modules/is-map": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", - "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -6711,6 +7519,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", @@ -6742,21 +7559,27 @@ } }, "node_modules/is-set": { 
- "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", - "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6813,22 +7636,28 @@ } }, "node_modules/is-weakmap": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", - "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-weakset": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", - "integrity": 
"sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz", + "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6845,6 +7674,15 @@ "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" }, + "node_modules/isomorphic-fetch": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", + "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", + "dependencies": { + "node-fetch": "^2.6.1", + "whatwg-fetch": "^3.4.1" + } + }, "node_modules/jackspeak": { "version": "2.3.6", "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", @@ -6877,6 +7715,76 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, + "node_modules/jest-diff/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-diff/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": 
"sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-diff/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-diff/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-diff/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-diff/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/jest-get-type": { "version": "27.5.1", "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.5.1.tgz", @@ -6901,6 +7809,76 @@ "node": "^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0" } }, + "node_modules/jest-matcher-utils/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-matcher-utils/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-matcher-utils/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-matcher-utils/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/jest-matcher-utils/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-matcher-utils/node_modules/supports-color": { + "version": "7.2.0", + "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/jiti": { "version": "1.21.0", "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.0.tgz", @@ -6969,6 +7947,29 @@ } } }, + "node_modules/jsdom/node_modules/tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "dependencies": { + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/jsdom/node_modules/whatwg-url": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "dependencies": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", @@ -6992,9 +7993,16 @@ "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" }, "node_modules/json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", 
+ "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true }, "node_modules/json5": { "version": "2.2.3", @@ -7019,9 +8027,9 @@ } }, "node_modules/katex": { - "version": "0.16.9", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.9.tgz", - "integrity": "sha512-fsSYjWS0EEOwvy81j3vRA8TEAhQhKiqO+FQaKWp0m39qwOzHVBgAUBIXWj1pB+O2W3fIpNa6Y9KSKCVbfPhyAQ==", + "version": "0.16.10", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.10.tgz", + "integrity": "sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==", "funding": [ "https://opencollective.com/katex", "https://github.com/sponsors/katex" @@ -7051,13 +8059,26 @@ } }, "node_modules/kleur": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", - "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", "engines": { "node": ">=6" } }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/lilconfig": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", @@ -7072,15 +8093,18 @@ "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" }, 
"node_modules/locate-path": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", - "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, "dependencies": { - "p-locate": "^4.1.0" + "p-locate": "^5.0.0" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/lodash": { @@ -7116,11 +8140,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/lodash.truncate": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", - "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==" - }, "node_modules/log-symbols": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", @@ -7198,19 +8217,17 @@ } }, "node_modules/lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dependencies": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "yallist": "^3.0.2" } }, "node_modules/lucide-react": { - "version": "0.233.0", - "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.233.0.tgz", - "integrity": 
"sha512-r0jMHF0vPDq2wBbZ0B3rtIcBjDyWDKpHu+vAjD2OHn2WLUr3HN5IHovtO0EMgQXuSI7YrMZbjsEZWC2uBHr8nQ==", + "version": "0.331.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.331.0.tgz", + "integrity": "sha512-CHFJ0ve9vaZ7bB2VRAl27SlX1ELh6pfNC0jS96qGpPEEzLkLDGq4pDBFU8RhOoRMqsjXqTzLm9U6bZ1OcIHq7Q==", "peerDependencies": { "react": "^16.5.1 || ^17.0.0 || ^18.0.0" } @@ -8055,6 +9072,25 @@ "node": ">=8.6" } }, + "node_modules/million": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/million/-/million-3.0.6.tgz", + "integrity": "sha512-OLjRVASGOZdyZw2ctBSSOu5kb9PaxafqkueqVvw0iQtUUnTLVRk1EmtqcNAtJWCIm8wn+WGRpDbnp+5Hi8//Kg==", + "dependencies": { + "@babel/core": "^7.23.7", + "@babel/types": "^7.23.6", + "@rollup/pluginutils": "^5.1.0", + "kleur": "^4.1.5", + "undici": "^6.3.0", + "unplugin": "^1.6.0" + }, + "bin": { + "million": "packages/cli/dist/index.js" + }, + "funding": { + "url": "https://github.com/sponsors/aidenybai" + } + }, "node_modules/mime-db": { "version": "1.52.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", @@ -8109,9 +9145,9 @@ } }, "node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -8205,6 +9241,12 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, 
"node_modules/no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", @@ -8233,20 +9275,22 @@ } }, "node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" + "whatwg-url": "^5.0.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": "4.x || >=6.0.0" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } } }, "node_modules/node-releases": { @@ -8295,6 +9339,15 @@ "node": ">=4" } }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/nwsapi": { "version": "2.2.7", "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.7.tgz", @@ -8326,13 +9379,13 @@ } }, "node_modules/object-is": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", - "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.6.tgz", + "integrity": 
"sha512-F8cZ+KfGlSGi09lJT7/Nd6KJZ9ygtvYC0/UYYLI9nmQKLMnydpB9yvbv9K1uSkEu7FU9vYPmVwLg328tX+ot3Q==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1" }, "engines": { "node": ">= 0.4" @@ -8391,6 +9444,23 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/optionator": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "dev": true, + "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/ora": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/ora/-/ora-6.3.1.tgz", @@ -8413,6 +9483,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/ora/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, "node_modules/ora/node_modules/chalk": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", @@ -8424,6 +9505,20 @@ "url": "https://github.com/chalk/chalk?sponsor=1" } }, + "node_modules/ora/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/chalk/strip-ansi?sponsor=1" + } + }, "node_modules/os-filter-obj": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", @@ -8455,30 +9550,33 @@ } }, "node_modules/p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, "dependencies": { - "p-try": "^2.0.0" + "yocto-queue": "^0.1.0" }, "engines": { - "node": ">=6" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-locate": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", - "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, "dependencies": { - "p-limit": "^2.2.0" + "p-limit": "^3.0.2" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/p-try": { @@ -8571,13 +9669,21 @@ "node": ">=8" } }, - "node_modules/path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": 
"sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "dev": true, "engines": { - "node": ">=4" + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" } }, "node_modules/path-parse": { @@ -8586,11 +9692,11 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" }, "node_modules/path-scurry": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.1.tgz", - "integrity": "sha512-MkhCqzzBEpPvxxQ71Md0b1Kk51W01lrYvlMzSUaIzNsODdd7mqhiimSZlr+VegAz5Z6Vzt9Xg2ttE//XBhH3EQ==", + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.10.2.tgz", + "integrity": "sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==", "dependencies": { - "lru-cache": "^9.1.1 || ^10.0.0", + "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" }, "engines": { @@ -8662,12 +9768,11 @@ } }, "node_modules/playwright": { - "version": "1.41.2", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.41.2.tgz", - "integrity": "sha512-v0bOa6H2GJChDL8pAeLa/LZC4feoAMbSQm1/jF/ySsWWoaNItvrMP7GEkvEEFyCTUYKMxjQKaTSg5up7nR6/8A==", - "dev": true, + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.42.1.tgz", + "integrity": "sha512-PgwB03s2DZBcNRoW+1w9E+VkLBxweib6KTXM0M3tkiT4jVxKSi6PmVJ591J+0u10LUrgxB7dLRbiJqO5s2QPMg==", "dependencies": { - "playwright-core": "1.41.2" + "playwright-core": "1.42.1" }, "bin": { "playwright": "cli.js" @@ -8680,10 +9785,9 @@ } }, "node_modules/playwright-core": { - "version": "1.41.2", - "resolved": 
"https://registry.npmjs.org/playwright-core/-/playwright-core-1.41.2.tgz", - "integrity": "sha512-VaTvwCA4Y8kxEe+kfm2+uUUw5Lubf38RxF7FpBxLPmGe5sdNkSg5e3ChEigaGrX7qdqT3pt2m/98LiyvU2x6CA==", - "dev": true, + "version": "1.42.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.42.1.tgz", + "integrity": "sha512-mxz6zclokgrke9p1vtdy/COWBH+eOZgYUVVU34C73M+4j4HLlQJHtfcqiqqxpP0o8HhMkflvfbquLX5dg6wlfA==", "bin": { "playwright-core": "cli.js" }, @@ -8691,10 +9795,19 @@ "node": ">=16" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/postcss": { - "version": "8.4.34", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.34.tgz", - "integrity": "sha512-4eLTO36woPSocqZ1zIrFD2K1v6wH7pY1uBh0JIM2KKfrVtGvPFiAku6aNOP0W1Wr9qwnaCsF0Z+CrVnryB2A8Q==", + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", "funding": [ { "type": "opencollective", @@ -8712,7 +9825,7 @@ "dependencies": { "nanoid": "^3.3.7", "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" + "source-map-js": "^1.2.0" }, "engines": { "node": "^10 || ^12 || >=14" @@ -8787,19 +9900,14 @@ } }, "node_modules/postcss-load-config/node_modules/lilconfig": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.0.0.tgz", - "integrity": "sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": 
"sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", "engines": { "node": ">=14" - } - }, - "node_modules/postcss-load-config/node_modules/yaml": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", - "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", - "engines": { - "node": ">= 14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" } }, "node_modules/postcss-nested": { @@ -8821,9 +9929,9 @@ } }, "node_modules/postcss-nested/node_modules/postcss-selector-parser": { - "version": "6.0.15", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz", - "integrity": "sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw==", + "version": "6.0.16", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz", + "integrity": "sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==", "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" @@ -8850,6 +9958,27 @@ "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" }, + "node_modules/posthog-node": { + "version": "3.6.3", + "resolved": "https://registry.npmjs.org/posthog-node/-/posthog-node-3.6.3.tgz", + "integrity": "sha512-JB+ei0LkwE+rKHyW5z79Nd1jUaGxU6TvkfjFqY9vQaHxU5aU8dRl0UUaEmZdZbHwjp3WmXCBQQRNyimwbNQfCw==", + "dependencies": { + "axios": "^1.6.2", + "rusha": "^0.8.14" + }, + "engines": { + "node": ">=15.0.0" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": 
"sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/prettier": { "version": "2.8.8", "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", @@ -8985,12 +10114,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/pretty-format/node_modules/react-is": { - "version": "17.0.2", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", - "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", - "dev": true - }, "node_modules/pretty-quick": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/pretty-quick/-/pretty-quick-3.3.1.tgz", @@ -9015,20 +10138,6 @@ "prettier": "^2.0.0" } }, - "node_modules/pretty-quick/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/pretty-quick/node_modules/execa": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/execa/-/execa-4.1.0.tgz", @@ -9052,6 +10161,19 @@ "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, + "node_modules/pretty-quick/node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/pretty-quick/node_modules/get-stream": { "version": "5.2.0", "resolved": 
"https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", @@ -9088,6 +10210,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/pretty-quick/node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/pretty-quick/node_modules/npm-run-path": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", @@ -9100,11 +10234,29 @@ "node": ">=8" } }, - "node_modules/pretty-quick/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "node_modules/pretty-quick/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pretty-quick/node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, "engines": { "node": ">=8" } @@ -9121,42 +10273,6 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/pretty-quick/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pretty-quick/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/pretty-quick/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/prismjs": { "version": "1.29.0", "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.29.0.tgz", @@ -9177,6 +10293,14 @@ "node": ">= 6" } }, + "node_modules/prompts/node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "engines": { + "node": ">=6" + } + }, "node_modules/prop-types": { "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", @@ -9323,10 +10447,23 @@ "react": "^18.2.0" } }, + "node_modules/react-draggable": { + "version": "4.4.6", + "resolved": "https://registry.npmjs.org/react-draggable/-/react-draggable-4.4.6.tgz", + "integrity": "sha512-LtY5Xw1zTPqHkVmtM3X8MUOxNDOUhv/khTgBgrUvwaS064bwVvxT+q5El0uUFNx5IEPKXuRejr7UqLwBIg5pdw==", + "dependencies": { + "clsx": "^1.1.1", + "prop-types": "^15.8.1" + }, + 
"peerDependencies": { + "react": ">= 16.3.0", + "react-dom": ">= 16.3.0" + } + }, "node_modules/react-error-boundary": { - "version": "4.0.12", - "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-4.0.12.tgz", - "integrity": "sha512-kJdxdEYlb7CPC1A0SeUY38cHpjuu6UkvzKiAmqmOFL21VRfMhOcWxTCBgLVCO0VEMh9JhFNcVaXlV4/BTpiwOA==", + "version": "4.0.13", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-4.0.13.tgz", + "integrity": "sha512-b6PwbdSv8XeOSYvjt8LpgpKrZ0yGdtZokYwkwV2wlcZbxgopHX/hgPl5VgpnoVOWd868n1hktM8Qm4b+02MiLQ==", "dependencies": { "@babel/runtime": "^7.12.5" }, @@ -9335,17 +10472,18 @@ } }, "node_modules/react-icons": { - "version": "4.12.0", - "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.12.0.tgz", - "integrity": "sha512-IBaDuHiShdZqmfc/TwHu6+d6k2ltNCf3AszxNmjJc1KUfXdEeRJOKyNvLmAHaarhzGmTSVygNdyu8/opXv2gaw==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.0.1.tgz", + "integrity": "sha512-WqLZJ4bLzlhmsvme6iFdgO8gfZP17rfjYEJ2m9RsZjZ+cc4k1hTzknEz63YS1MeT50kVzoa1Nz36f4BEx+Wigw==", "peerDependencies": { "react": "*" } }, "node_modules/react-is": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", - "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" + "version": "17.0.2", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz", + "integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==", + "dev": true }, "node_modules/react-laag": { "version": "2.0.5", @@ -9389,6 +10527,26 @@ "react": ">=16" } }, + "node_modules/react-markdown/node_modules/react-is": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": 
"sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" + }, + "node_modules/react-reconciler": { + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/react-reconciler/-/react-reconciler-0.29.0.tgz", + "integrity": "sha512-wa0fGj7Zht1EYMRhKWwoo1H9GApxYLBuhoAuXN0TlltESAjDssB+Apf0T/DngVqaMyPypDmabL37vw/2aRM98Q==", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "peerDependencies": { + "react": "^18.2.0" + } + }, "node_modules/react-remove-scroll": { "version": "2.5.5", "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", @@ -9414,9 +10572,9 @@ } }, "node_modules/react-remove-scroll-bar": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.4.tgz", - "integrity": "sha512-63C4YQBUt0m6ALadE9XV56hV8BgJWDmmTPY758iIJjfQKt2nYwoUrPk0LXRXcB/yIj82T1/Ixfdpdk68LwIB0A==", + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.6.tgz", + "integrity": "sha512-DtSYaao4mBmX+HDo5YWYdBWQwYIQQshUV/dVxFxK+KM26Wjwp1gZ6rv6OC3oujI6Bfu6Xyg3TwK533AQutsn/g==", "dependencies": { "react-style-singleton": "^2.2.1", "tslib": "^2.0.0" @@ -9435,11 +10593,11 @@ } }, "node_modules/react-router": { - "version": "6.22.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.0.tgz", - "integrity": "sha512-q2yemJeg6gw/YixRlRnVx6IRJWZD6fonnfZhN1JIOhV2iJCPeRNSH3V1ISwHf+JWcESzLC3BOLD1T07tmO5dmg==", + "version": "6.22.3", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.22.3.tgz", + "integrity": "sha512-dr2eb3Mj5zK2YISHK++foM9w4eBnO23eKnZEDs7c880P6oKbrjz/Svg9+nxqtHQK+oMW4OtjZca0RqPglXxguQ==", "dependencies": { - "@remix-run/router": "1.15.0" + "@remix-run/router": "1.15.3" }, "engines": { "node": ">=14.0.0" @@ -9449,12 +10607,12 @@ } }, "node_modules/react-router-dom": { - 
"version": "6.22.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.22.0.tgz", - "integrity": "sha512-z2w+M4tH5wlcLmH3BMMOMdrtrJ9T3oJJNsAlBJbwk+8Syxd5WFJ7J5dxMEW0/GEXD1BBis4uXRrNIz3mORr0ag==", + "version": "6.22.3", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.22.3.tgz", + "integrity": "sha512-7ZILI7HjcE+p31oQvwbokjk6OA/bnFxrhJ19n82Ex9Ph8fNAq+Hm/7KchpMGlTgWhUxRHMMCut+vEtNpWpowKw==", "dependencies": { - "@remix-run/router": "1.15.0", - "react-router": "6.22.0" + "@remix-run/router": "1.15.3", + "react-router": "6.22.3" }, "engines": { "node": ">=14.0.0" @@ -9501,54 +10659,6 @@ "react": ">= 0.14.0" } }, - "node_modules/react-tabs": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/react-tabs/-/react-tabs-6.0.2.tgz", - "integrity": "sha512-aQXTKolnM28k3KguGDBSAbJvcowOQr23A+CUJdzJtOSDOtTwzEaJA+1U4KwhNL9+Obe+jFS7geuvA7ICQPXOnQ==", - "dependencies": { - "clsx": "^2.0.0", - "prop-types": "^15.5.0" - }, - "peerDependencies": { - "react": "^18.0.0" - } - }, - "node_modules/react-tabs/node_modules/clsx": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.0.tgz", - "integrity": "sha512-m3iNNWpd9rl3jvvcBnu70ylMdrXt8Vlq4HYadnU5fwcOtvkSQWPmj7amUcDT2qYI7risszBjI5AUIUox9D16pg==", - "engines": { - "node": ">=6" - } - }, - "node_modules/react-tooltip": { - "version": "5.26.0", - "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.26.0.tgz", - "integrity": "sha512-UBbwy3fo1KYDwRCOWwM6AEfQsk9shgVfNkXFqgwS33QHplzg7xao/7mX/6wd+lE6KSZzhUNTkB5TNk9SMaBV/A==", - "dependencies": { - "@floating-ui/dom": "^1.0.0", - "classnames": "^2.3.0" - }, - "peerDependencies": { - "react": ">=16.14.0", - "react-dom": ">=16.14.0" - } - }, - "node_modules/react-transition-group": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", - "integrity": 
"sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", - "dependencies": { - "@babel/runtime": "^7.5.5", - "dom-helpers": "^5.0.1", - "loose-envify": "^1.4.0", - "prop-types": "^15.6.2" - }, - "peerDependencies": { - "react": ">=16.6.0", - "react-dom": ">=16.6.0" - } - }, "node_modules/react18-json-view": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/react18-json-view/-/react18-json-view-0.2.7.tgz", @@ -9558,16 +10668,16 @@ } }, "node_modules/reactflow": { - "version": "11.10.3", - "resolved": "https://registry.npmjs.org/reactflow/-/reactflow-11.10.3.tgz", - "integrity": "sha512-DGNrTdkWjZtPOhj5MV8fiWWGkJo+otMVdoJ9l67bQL+Xf+8NkJ4AHmRXoYIxtgcENzwTr5WTAIJlswV9i91cyw==", + "version": "11.10.4", + "resolved": "https://registry.npmjs.org/reactflow/-/reactflow-11.10.4.tgz", + "integrity": "sha512-0CApYhtYicXEDg/x2kvUHiUk26Qur8lAtTtiSlptNKuyEuGti6P1y5cS32YGaUoDMoCqkm/m+jcKkfMOvSCVRA==", "dependencies": { - "@reactflow/background": "11.3.8", - "@reactflow/controls": "11.2.8", - "@reactflow/core": "11.10.3", - "@reactflow/minimap": "11.7.8", - "@reactflow/node-resizer": "2.2.8", - "@reactflow/node-toolbar": "1.3.8" + "@reactflow/background": "11.3.9", + "@reactflow/controls": "11.2.9", + "@reactflow/core": "11.10.4", + "@reactflow/minimap": "11.7.9", + "@reactflow/node-resizer": "2.2.9", + "@reactflow/node-toolbar": "1.3.9" }, "peerDependencies": { "react": ">=17", @@ -9663,14 +10773,15 @@ "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" }, "node_modules/regexp.prototype.flags": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", - "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": 
"sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "set-function-name": "^2.0.0" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -9679,6 +10790,18 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, "node_modules/rehype-mathjax": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/rehype-mathjax/-/rehype-mathjax-4.0.3.tgz", @@ -9757,14 +10880,6 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/require-from-string": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", - "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", @@ -9836,6 +10951,21 @@ "node": ">=0.10.0" } }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/rollup": { "version": "3.29.4", "resolved": 
"https://registry.npmjs.org/rollup/-/rollup-3.29.4.tgz", @@ -9873,6 +11003,11 @@ "queue-microtask": "^1.2.2" } }, + "node_modules/rusha": { + "version": "0.8.14", + "resolved": "https://registry.npmjs.org/rusha/-/rusha-0.8.14.tgz", + "integrity": "sha512-cLgakCUf6PedEu15t8kbsjnwIFFR2D4RfL+W3iWFJ4iac7z4B0ZI8fxy4R3J956kAI68HclCFGL8MPoUVC3qVA==" + }, "node_modules/sade": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", @@ -9928,18 +11063,11 @@ } }, "node_modules/semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" } }, "node_modules/semver-regex": { @@ -9969,7 +11097,7 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/semver/node_modules/lru-cache": { + "node_modules/semver-truncate/node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", @@ -9981,38 +11109,54 @@ "node": ">=10" } }, - "node_modules/semver/node_modules/yallist": { + "node_modules/semver-truncate/node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + 
"node_modules/semver-truncate/node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, "node_modules/set-function-length": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", - "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", "dev": true, "dependencies": { - "define-data-property": "^1.1.2", + "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.3", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" } }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dev": true, "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -10043,9 +11187,9 @@ } }, "node_modules/shadcn-ui/node_modules/agent-base": { - "version": "7.1.0", - 
"resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz", - "integrity": "sha512-o/zjMZRhJxny7OyEF+Op8X+efiELC7k7yOjMzgfzVqOzXqkBkWI79YoTdOtsuWd5BWhAGAuOY/Xa6xpiaWXiNg==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", + "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", "dependencies": { "debug": "^4.3.4" }, @@ -10072,44 +11216,6 @@ "node": ">=14" } }, - "node_modules/shadcn-ui/node_modules/cosmiconfig": { - "version": "8.3.6", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", - "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", - "dependencies": { - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0", - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/d-fischer" - }, - "peerDependencies": { - "typescript": ">=4.9.5" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/shadcn-ui/node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/shadcn-ui/node_modules/execa": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", @@ -10185,26 +11291,32 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/shadcn-ui/node_modules/npm-run-path": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.2.0.tgz", - "integrity": 
"sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg==", + "node_modules/shadcn-ui/node_modules/node-fetch": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", + "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", "dependencies": { - "path-key": "^4.0.0" + "data-uri-to-buffer": "^4.0.0", + "fetch-blob": "^3.1.4", + "formdata-polyfill": "^4.0.10" }, "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/node-fetch" } }, - "node_modules/shadcn-ui/node_modules/npm-run-path/node_modules/path-key": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", - "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "node_modules/shadcn-ui/node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "dependencies": { + "path-key": "^4.0.0" + }, "engines": { - "node": ">=12" + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -10225,30 +11337,14 @@ } }, "node_modules/shadcn-ui/node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", "engines": { - "node": ">=8" - } - }, 
- "node_modules/shadcn-ui/node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": { - "shebang-regex": "^3.0.0" + "node": ">=12" }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shadcn-ui/node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "engines": { - "node": ">=8" + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/shadcn-ui/node_modules/strip-final-newline": { @@ -10262,39 +11358,23 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/shadcn-ui/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", - "dev": true, + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dependencies": { - "shebang-regex": "^1.0.0" + "shebang-regex": "^3.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/shebang-regex": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true, + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, "node_modules/short-unique-id": { @@ -10307,12 +11387,12 @@ } }, "node_modules/side-channel": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz", - "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.6", + "call-bind": "^1.0.7", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.4", "object-inspect": "^1.13.1" @@ -10343,22 +11423,6 @@ "node": ">=8" } }, - "node_modules/slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dependencies": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/slice-ansi?sponsor=1" - } - }, "node_modules/snake-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", @@ -10393,17 +11457,18 @@ } }, "node_modules/source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": 
"sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ==", + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", + "dev": true, "engines": { - "node": ">=0.10.0" + "node": ">= 8" } }, "node_modules/source-map-js": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", - "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", "engines": { "node": ">=0.10.0" } @@ -10473,16 +11538,19 @@ } }, "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" }, "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/string-width-cjs": { @@ -10499,29 +11567,23 @@ "node": ">=8" } }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/string-width/node_modules/ansi-regex": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", "engines": { - "node": ">=8" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, "node_modules/string-width/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", @@ -10535,8 +11597,7 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/strip-ansi-cjs": { - "name": "strip-ansi", + "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", @@ -10547,15 +11608,16 @@ "node": ">=8" } }, - "node_modules/strip-ansi/node_modules/ansi-regex": { + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "engines": { - "node": ">=12" + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" + "engines": { + "node": ">=8" } }, "node_modules/strip-bom": { @@ -10596,6 +11658,18 @@ "node": ">=8" } }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strip-outer": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-2.0.0.tgz", @@ -10633,11 +11707,6 @@ "inline-style-parser": "0.1.1" } }, - "node_modules/stylis": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz", - "integrity": "sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw==" - }, "node_modules/sucrase": { "version": "3.35.0", "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.0.tgz", @@ -10667,16 +11736,36 @@ "node": ">= 6" } }, - "node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, + "node_modules/sucrase/node_modules/glob": { + "version": "10.3.12", + "resolved": 
"https://registry.npmjs.org/glob/-/glob-10.3.12.tgz", + "integrity": "sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==", "dependencies": { - "has-flag": "^4.0.0" + "foreground-child": "^3.1.0", + "jackspeak": "^2.3.6", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.10.2" + }, + "bin": { + "glob": "dist/esm/bin.mjs" }, "engines": { - "node": ">=8" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" } }, "node_modules/supports-preserve-symlinks-flag": { @@ -10695,42 +11784,11 @@ "resolved": "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz", "integrity": "sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==" }, - "node_modules/switch": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/switch/-/switch-0.0.0.tgz", - "integrity": "sha512-Pvi4hlAXWHEIT+4XlQEPPIQ02hRzvn38K/cnZ5sZeM11FsDPoXvBD6i/zyVxFK6cgqSlS8sA5/sIwUGp9+ZMhw==" - }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==" }, - "node_modules/table": { - "version": "6.8.1", - "resolved": "https://registry.npmjs.org/table/-/table-6.8.1.tgz", - "integrity": "sha512-Y4X9zqrCftUhMeH2EptSSERdVKt/nEdijTOacGD/97EKjhQ/Qs8RTlEGABSJNNN8lac9kheH+af7yAkEWlgneA==", - "dependencies": { - "ajv": "^8.0.1", - "lodash.truncate": "^4.4.2", - "slice-ansi": "^4.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1" - }, - 
"engines": { - "node": ">=10.0.0" - } - }, - "node_modules/table/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/tailwind-merge": { "version": "1.14.0", "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-1.14.0.tgz", @@ -10741,9 +11799,9 @@ } }, "node_modules/tailwindcss": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.1.tgz", - "integrity": "sha512-qAYmXRfk3ENzuPBakNK0SRrUDipP8NQnEY6772uDhflcQz5EhRdD7JNZxyrFHVQNCwULPBn6FNPp9brpO7ctcA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.3.tgz", + "integrity": "sha512-U7sxQk/n397Bmx4JHbJx/iSOOv5G+II3f1kpLpY2QeUv5DcPdcTsYLlusZfq1NthHS1c1cZoyFmmkex1rzke0A==", "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", @@ -10753,7 +11811,7 @@ "fast-glob": "^3.3.0", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", - "jiti": "^1.19.1", + "jiti": "^1.21.0", "lilconfig": "^2.1.0", "micromatch": "^4.0.5", "normalize-path": "^3.0.0", @@ -10784,21 +11842,19 @@ "tailwindcss": ">=3.0.0 || insiders" } }, - "node_modules/tailwindcss/node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" + "node_modules/tailwindcss-dotted-background": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/tailwindcss-dotted-background/-/tailwindcss-dotted-background-1.1.0.tgz", + "integrity": 
"sha512-uFzCW5Bpyn8XgppTkyzqdHecH7XCDaS/eXvegDrOCYE6PxTm7dRrD9cuUcZe6mxQFVfkLu9rDmHJdqbjz9FdLA==", + "dev": true, + "peerDependencies": { + "tailwindcss": "^3.0.0" } }, "node_modules/tailwindcss/node_modules/postcss-selector-parser": { - "version": "6.0.15", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.15.tgz", - "integrity": "sha512-rEYkQOMUCEMhsKbK66tbEU9QVIxbhN18YiniAwA7XQYTVBqrBy+P2p5JcdqsHgKM2zWylp8d7J6eszocfds5Sw==", + "version": "6.0.16", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.16.tgz", + "integrity": "sha512-A0RVJrX+IUkVZbW3ClroRWurercFhieevHB38sr2+l9eUClMqome3LmEmnhlNy+5Mr2EYN6B2Kaw9wYdd+VHiw==", "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" @@ -10807,6 +11863,12 @@ "node": ">=4" } }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, "node_modules/thenify": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", @@ -10890,15 +11952,9 @@ } }, "node_modules/tr46": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", - "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", - "dependencies": { - "punycode": "^2.1.1" - }, - "engines": { - "node": ">=12" - } + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "node_modules/trim-lines": { "version": "3.0.1", @@ -10974,10 +12030,34 @@ "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", "integrity": 
"sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.4.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.3.tgz", + "integrity": "sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==", "devOptional": true, "bin": { "tsc": "bin/tsc", @@ -10987,6 +12067,37 @@ "node": ">=14.17" } }, + "node_modules/ua-parser-js": { + "version": "1.0.37", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.37.tgz", + "integrity": "sha512-bhTyI94tZofjo+Dn8SN6Zv8nBDvyXTymAdM3LDI/0IboIUwTu1rEhW7v2TfiVsoYWgkQ4kOVqnI8APUFbIQIFQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + "type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "engines": { + "node": "*" + } + }, + "node_modules/undici": { + "version": "6.10.2", + 
"resolved": "https://registry.npmjs.org/undici/-/undici-6.10.2.tgz", + "integrity": "sha512-HcVuBy7ACaDejIMdwCzAvO22OsiE6ir6ziTIr9kAE0vB+PheVe29ZvRN8p7FXCO2uZHTjEoUs5bPiFpuc/hwwQ==", + "engines": { + "node": ">=18.0" + } + }, "node_modules/unified": { "version": "10.1.2", "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", @@ -11118,6 +12229,20 @@ "node": ">= 10.0.0" } }, + "node_modules/unplugin": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/unplugin/-/unplugin-1.10.1.tgz", + "integrity": "sha512-d6Mhq8RJeGA8UfKCu54Um4lFA0eSaRa3XxdAJg8tIdxbu1ubW0hBCZUL7yI2uGyYCRndvbK8FLHzqy2XKfeMsg==", + "dependencies": { + "acorn": "^8.11.3", + "chokidar": "^3.6.0", + "webpack-sources": "^3.2.3", + "webpack-virtual-modules": "^0.6.1" + }, + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/update-browserslist-db": { "version": "1.0.13", "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", @@ -11151,6 +12276,7 @@ "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, "dependencies": { "punycode": "^2.1.0" } @@ -11165,9 +12291,9 @@ } }, "node_modules/use-callback-ref": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.1.tgz", - "integrity": "sha512-Lg4Vx1XZQauB42Hw3kK7JM6yjVjgFmFC5/Ab797s79aARomD2nEErc4mCgM8EZrARLmmbWpi5DGCadmK50DcAQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.2.tgz", + "integrity": "sha512-elOQwe6Q8gqZgDA8mrh44qRTQqpIHDcZ3hXTLjBe1i4ph8XpNJnO+aQf3NaG+lriLopI4HMx9VjQLfPQ6vhnoA==", "dependencies": { "tslib": "^2.0.0" }, @@ -11247,14 +12373,6 @@ "node": ">=8" } }, - "node_modules/uvu/node_modules/kleur": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", - 
"integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", - "engines": { - "node": ">=6" - } - }, "node_modules/vfile": { "version": "5.3.7", "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz", @@ -11284,9 +12402,9 @@ } }, "node_modules/vite": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.2.tgz", - "integrity": "sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==", + "version": "4.5.3", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.3.tgz", + "integrity": "sha512-kQL23kMeX92v3ph7IauVkXkikdDRsYMGTVl5KY2E9OY4ONLvkHf04MDTbnfo6NKxZiDLWzVpP5oTa8hQD8U3dg==", "dependencies": { "esbuild": "^0.18.10", "postcss": "^8.4.27", @@ -11745,9 +12863,9 @@ } }, "node_modules/web-streams-polyfill": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", - "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", + "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", "engines": { "node": ">= 8" } @@ -11765,6 +12883,19 @@ "node": ">=12" } }, + "node_modules/webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack-virtual-modules": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/webpack-virtual-modules/-/webpack-virtual-modules-0.6.1.tgz", + "integrity": "sha512-poXpCylU7ExuvZK8z+On3kX+S8o/2dQ/SVYueKA0D4WEMXROXgY8Ez50/bQEUmvoSMMrWcrJqCHuhAbsiwg7Dg==" + }, 
"node_modules/whatwg-encoding": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", @@ -11776,6 +12907,11 @@ "node": ">=12" } }, + "node_modules/whatwg-fetch": { + "version": "3.6.20", + "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", + "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + }, "node_modules/whatwg-mimetype": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", @@ -11785,27 +12921,31 @@ } }, "node_modules/whatwg-url": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", - "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "dependencies": { - "tr46": "^3.0.0", - "webidl-conversions": "^7.0.0" - }, - "engines": { - "node": ">=12" + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" } }, + "node_modules/whatwg-url/node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, "node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dependencies": { 
"isexe": "^2.0.0" }, "bin": { - "which": "bin/which" + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" } }, "node_modules/which-boxed-primitive": { @@ -11825,31 +12965,34 @@ } }, "node_modules/which-collection": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", - "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", "dev": true, "dependencies": { - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-weakmap": "^2.0.1", - "is-weakset": "^2.0.1" + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/which-typed-array": { - "version": "1.1.14", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.14.tgz", - "integrity": "sha512-VnXFiIW8yNn9kIHN88xvZ4yOWchftKDsRJ8fEPacX/wl1lOvBrhsJ/OeJCXq7B0AaijRuqgzSKalJoPk+D8MPg==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.6", - "call-bind": "^1.0.5", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.1" + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -11896,15 +13039,63 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dependencies": { - "ansi-regex": "^5.0.1" + "color-convert": "^2.0.1" }, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } 
+ }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" } }, "node_modules/wrap-ansi/node_modules/ansi-styles": { @@ -11918,25 +13109,18 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/wrap-ansi/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "node_modules/wrap-ansi/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" + "ansi-regex": "^6.0.1" }, "engines": { "node": ">=12" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, "node_modules/wrappy": { @@ -11995,17 +13179,31 @@ } }, "node_modules/yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", - "dev": true + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "node_modules/yaml": { - "version": "1.10.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", - "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.1.tgz", + "integrity": "sha512-pIXzoImaqmfOrL7teGUBt/T7ZDnyeGBWyXQBvOVhLkWLN37GXv8NMLK406UY6dS51JfcQHsmcW5cJ441bHg6Lg==", + "bin": { + "yaml": "bin.mjs" + }, "engines": { - "node": ">= 6" + "node": ">= 14" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/zod": { @@ -12017,9 +13215,9 @@ } }, "node_modules/zustand": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/zustand/-/zustand-4.5.0.tgz", - "integrity": "sha512-zlVFqS5TQ21nwijjhJlx4f9iGrXSL0o/+Dpy4txAP22miJ8Ti6c1Ol1RLNN98BMib83lmDH/2KmLwaNXpjrO1A==", + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/zustand/-/zustand-4.5.2.tgz", + "integrity": "sha512-2cN1tPkDVkwCy5ickKrI7vijSjPksFRfqS6237NzT0vqSsztTNnQdHw9mmN7uBdk3gceVXU0a+21jFzFzAc9+g==", "dependencies": { "use-sync-external-store": "1.2.0" }, diff --git a/src/frontend/package.json b/src/frontend/package.json index 5ee8b6741..fd27d8bd6 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -3,12 +3,8 @@ "version": "0.1.2", "private": true, "dependencies": { - "@emotion/react": "^11.11.1", - "@emotion/styled": "^11.11.0", "@headlessui/react": "^1.7.17", - "@heroicons/react": "^2.0.18", - 
"@mui/material": "^5.14.7", - "@preact/signals-react": "^2.0.0", + "@million/lint": "^0.0.73", "@radix-ui/react-accordion": "^1.1.2", "@radix-ui/react-checkbox": "^1.0.4", "@radix-ui/react-dialog": "^1.0.4", @@ -19,7 +15,7 @@ "@radix-ui/react-menubar": "^1.0.3", "@radix-ui/react-popover": "^1.0.6", "@radix-ui/react-progress": "^1.0.3", - "@radix-ui/react-select": "^1.2.2", + "@radix-ui/react-select": "^2.0.0", "@radix-ui/react-separator": "^1.0.3", "@radix-ui/react-slot": "^1.0.2", "@radix-ui/react-switch": "^1.0.3", @@ -29,31 +25,31 @@ "@tailwindcss/forms": "^0.5.6", "@tailwindcss/line-clamp": "^0.4.4", "@types/axios": "^0.14.0", - "accordion": "^3.0.2", "ace-builds": "^1.24.1", - "add": "^2.0.6", "ansi-to-html": "^0.7.2", "axios": "^1.5.0", "base64-js": "^1.5.1", "class-variance-authority": "^0.6.1", "clsx": "^1.2.1", + "cmdk": "^1.0.0", "dompurify": "^3.0.5", "esbuild": "^0.17.19", + "framer-motion": "^11.0.6", "lodash": "^4.17.21", - "lucide-react": "^0.233.0", + "lucide-react": "^0.331.0", + "million": "^3.0.6", "moment": "^2.29.4", + "playwright": "^1.42.0", "react": "^18.2.0", "react-ace": "^10.1.0", "react-cookie": "^4.1.1", "react-dom": "^18.2.0", "react-error-boundary": "^4.0.11", - "react-icons": "^4.10.1", + "react-icons": "^5.0.1", "react-laag": "^2.0.5", "react-markdown": "^8.0.7", "react-router-dom": "^6.15.0", "react-syntax-highlighter": "^15.5.0", - "react-tabs": "^6.0.2", - "react-tooltip": "^5.21.1", "react18-json-view": "^0.2.3", "reactflow": "^11.9.2", "rehype-mathjax": "^4.0.3", @@ -61,8 +57,6 @@ "remark-math": "^5.1.1", "shadcn-ui": "^0.2.3", "short-unique-id": "^4.4.4", - "switch": "^0.0.0", - "table": "^6.8.1", "tailwind-merge": "^1.14.0", "tailwindcss-animate": "^1.0.7", "uuid": "^9.0.0", @@ -98,7 +92,7 @@ }, "proxy": "http://127.0.0.1:7860", "devDependencies": { - "@playwright/test": "^1.38.0", + "@playwright/test": "^1.42.0", "@swc/cli": "^0.1.62", "@swc/core": "^1.3.80", "@tailwindcss/typography": "^0.5.9", @@ -114,13 +108,17 @@ 
"@vitejs/plugin-react-swc": "^3.3.2", "autoprefixer": "^10.4.15", "daisyui": "^4.0.4", + "eslint": "^8.57.0", + "eslint-plugin-node": "^11.1.0", "postcss": "^8.4.29", "prettier": "^2.8.8", "prettier-plugin-organize-imports": "^3.2.3", "prettier-plugin-tailwindcss": "^0.3.0", "pretty-quick": "^3.1.3", "tailwindcss": "^3.3.3", + "tailwindcss-dotted-background": "^1.1.0", "typescript": "^5.2.2", + "ua-parser-js": "^1.0.37", "vite": "^4.5.2" } } diff --git a/src/frontend/playwright-report/index.html b/src/frontend/playwright-report/index.html deleted file mode 100644 index e634446de..000000000 --- a/src/frontend/playwright-report/index.html +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - Document - - - - - diff --git a/src/frontend/playwright.config.ts b/src/frontend/playwright.config.ts index a563c65f7..59f30c4ba 100644 --- a/src/frontend/playwright.config.ts +++ b/src/frontend/playwright.config.ts @@ -20,18 +20,22 @@ export default defineConfig({ /* Opt out of parallel tests on CI. */ workers: process.env.CI ? 2 : undefined, /* Reporter to use. See https://playwright.dev/docs/test-reporters */ - reporter: [ - ["html", { open: "never", outputFolder: "playwright-report/test-results" }], - ], + timeout: 120 * 1000, + // reporter: [ + // ["html", { open: "never", outputFolder: "playwright-report/test-results" }], + // ], + reporter: process.env.CI ? "blob" : "html", /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ use: { /* Base URL to use in actions like `await page.goto('/')`. */ - // baseURL: "http://127.0.0.1:3000", + baseURL: "http://localhost:3000/", /* Collect trace when retrying the failed test. 
See https://playwright.dev/docs/trace-viewer */ trace: "on-first-retry", }, + globalTeardown: require.resolve("./tests/globalTeardown.ts"), + /* Configure projects for major browsers */ projects: [ { @@ -48,39 +52,28 @@ export default defineConfig({ // name: "webkit", // use: { ...devices["Desktop Safari"] }, // }, - - /* Test against mobile viewports. */ - // { - // name: 'Mobile Chrome', - // use: { ...devices['Pixel 5'] }, - // }, - // { - // name: 'Mobile Safari', - // use: { ...devices['iPhone 12'] }, - // }, - - /* Test against branded browsers. */ - // { - // name: 'Microsoft Edge', - // use: { ...devices['Desktop Edge'], channel: 'msedge' }, - // }, - // { - // name: 'Google Chrome', - // use: { ...devices['Desktop Chrome'], channel: 'chrome' }, - // }, ], - /* Run your local dev server before starting the tests */ - // webServer: [ - // { - // command: "npm run backend", - // reuseExistingServer: !process.env.CI, - // timeout: 120 * 1000, - // }, - // { - // command: "npm run start", - // url: "http://127.0.0.1:3000", - // reuseExistingServer: !process.env.CI, - // }, - // ], + webServer: [ + { + command: + "poetry run uvicorn --factory langflow.main:create_app --host 127.0.0.1 --port 7860", + port: 7860, + env: { + LANGFLOW_DATABASE_URL: "sqlite:///./temp", + LANGFLOW_AUTO_LOGIN: "true", + }, + stdout: "ignore", + + reuseExistingServer: !process.env.CI, + timeout: 120 * 1000, + }, + { + command: "npm start", + port: 3000, + env: { + VITE_PROXY_TARGET: "http://127.0.0.1:7860", + }, + }, + ], }); diff --git a/src/frontend/run-tests.sh b/src/frontend/run-tests.sh index 76e2fd756..f04a09662 100755 --- a/src/frontend/run-tests.sh +++ b/src/frontend/run-tests.sh @@ -3,6 +3,17 @@ # Default value for the --ui flag ui=false +# Absolute path to the project root directory +PROJECT_ROOT="../../" + +# Check if necessary commands are available +for cmd in npx poetry fuser; do + if ! 
command -v $cmd &> /dev/null; then + echo "Error: Required command '$cmd' is not installed. Aborting." + exit 1 + fi +done + # Parse command-line arguments while [[ $# -gt 0 ]]; do key="$1" @@ -23,56 +34,85 @@ done terminate_process_by_port() { port="$1" echo "Terminating process on port: $port" - fuser -k -n tcp "$port" # Forcefully terminate processes using the specified port - echo "Process terminated." + if ! fuser -k -n tcp "$port"; then + echo "Failed to terminate process on port $port. Please check manually." + else + echo "Process terminated." + fi +} + +delete_temp() { + if cd "$PROJECT_ROOT"; then + echo "Deleting temp database" + rm -f temp && echo "Temp database deleted." || echo "Failed to delete temp database." + else + echo "Failed to navigate to project root for cleanup." + fi } # Trap signals to ensure cleanup on script termination -trap 'terminate_process_by_port 7860; terminate_process_by_port 3000' EXIT +trap 'terminate_process_by_port 7860; terminate_process_by_port 3000; delete_temp' EXIT -# install playwright if there is not installed yet -npx playwright install +# Ensure the script is executed from the project root directory +if ! cd "$PROJECT_ROOT"; then + echo "Error: Failed to navigate to project root directory. Aborting." + exit 1 +fi -# Navigate to the project root directory (where the Makefile is located) -cd ../../ +# Install playwright if not installed yet +if ! npx playwright install; then + echo "Error: Failed to install Playwright. Aborting." 
+ exit 1 +fi -# Start the frontend using 'make frontend' in the background -make frontend & +# Start the frontend +make frontend > /dev/null 2>&1 & -# Give some time for the frontend to start (adjust sleep duration as needed) +# Adjust sleep duration as needed sleep 10 -# Navigate to the test directory -cd src/frontend - -# Run frontend only Playwright tests with or without UI based on the --ui flag -if [ "$ui" = true ]; then - PLAYWRIGHT_HTML_REPORT=playwright-report/onlyFront npx playwright test tests/onlyFront --ui --project=chromium -else - PLAYWRIGHT_HTML_REPORT=playwright-report/onlyFront npx playwright test tests/onlyFront --project=chromium +# Install backend dependencies +if ! poetry install; then + echo "Error: Failed to install backend dependencies. Aborting." + exit 1 fi -# Navigate back to the project root directory -cd ../../ - -# Start the backend using 'make backend' in the background -make backend & - -# Give some time for the backend to start (adjust sleep duration as needed) +# Start the backend +LANGFLOW_DATABASE_URL=sqlite:///./temp LANGFLOW_AUTO_LOGIN=True poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser > /dev/null 2>&1 & +backend_pid=$! # Capture PID of the backend process +# Adjust sleep duration as needed sleep 25 -# Navigate back to the test directory -cd src/frontend - -# Run Playwright tests with or without UI based on the --ui flag -if [ "$ui" = true ]; then - PLAYWRIGHT_HTML_REPORT=playwright-report/e2e npx playwright test tests/end-to-end --ui --project=chromium -else - PLAYWRIGHT_HTML_REPORT=playwright-report/e2e npx playwright test tests/end-to-end --project=chromium +# Navigate to the test directory +if ! cd src/frontend; then + echo "Error: Failed to navigate to test directory. Aborting." + kill $backend_pid # Terminate the backend process if navigation fails + echo "Backend process terminated." + exit 1 fi -npx playwright show-report +# Check if backend is running +if ! 
lsof -i :7860; then + echo "Error: Backend is not running. Aborting." + exit 1 +fi -# After the tests are finished, you can add cleanup or teardown logic here if needed +# Run Playwright tests +if [ "$ui" = true ]; then + TEST_COMMAND="npx playwright test tests/end-to-end --ui --project=chromium" +else + TEST_COMMAND="npx playwright test tests/end-to-end --project=chromium" +fi -# The trap will automatically terminate processes by port on script exit +if ! PLAYWRIGHT_HTML_REPORT=playwright-report/e2e $TEST_COMMAND; then + echo "Error: Playwright tests failed. Aborting." + exit 1 +fi + +if [ "$ui" = true ]; then + echo "Opening Playwright report..." + npx playwright show-report +fi + + +trap 'terminate_process_by_port 7860; terminate_process_by_port 3000; delete_temp; kill $backend_pid 2>/dev/null' EXIT \ No newline at end of file diff --git a/src/frontend/src/App.css b/src/frontend/src/App.css index 095c63bd7..6aa681415 100644 --- a/src/frontend/src/App.css +++ b/src/frontend/src/App.css @@ -90,3 +90,12 @@ body { .jv-indent::-webkit-scrollbar-thumb:hover { background-color: #bbb !important; } + +.custom-hover { + transition: background-color 0.5s ease; +} + +.custom-hover:hover { + background-color: rgba(99, 102, 241, 0.1); /* Medium indigo color with 20% opacity */ +} + diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx index 0624c0064..da3c50325 100644 --- a/src/frontend/src/App.tsx +++ b/src/frontend/src/App.tsx @@ -1,9 +1,9 @@ -import _ from "lodash"; import { useContext, useEffect, useState } from "react"; import "reactflow/dist/style.css"; import "./App.css"; import { ErrorBoundary } from "react-error-boundary"; +import { useNavigate } from "react-router-dom"; import ErrorAlert from "./alerts/error"; import NoticeAlert from "./alerts/notice"; import SuccessAlert from "./alerts/success"; @@ -15,109 +15,27 @@ import { FETCH_ERROR_MESSAGE, } from "./constants/constants"; import { AuthContext } from "./contexts/authContext"; -import { getHealth } 
from "./controllers/API"; +import { getGlobalVariables, getHealth } from "./controllers/API"; import Router from "./routes"; import useAlertStore from "./stores/alertStore"; import { useDarkStore } from "./stores/darkStore"; import useFlowsManagerStore from "./stores/flowsManagerStore"; +import { useGlobalVariablesStore } from "./stores/globalVariables"; import { useStoreStore } from "./stores/storeStore"; import { useTypesStore } from "./stores/typesStore"; export default function App() { - const errorData = useAlertStore((state) => state.errorData); - const errorOpen = useAlertStore((state) => state.errorOpen); - const setErrorOpen = useAlertStore((state) => state.setErrorOpen); - const noticeData = useAlertStore((state) => state.noticeData); - const noticeOpen = useAlertStore((state) => state.noticeOpen); - const setNoticeOpen = useAlertStore((state) => state.setNoticeOpen); - const successData = useAlertStore((state) => state.successData); - const successOpen = useAlertStore((state) => state.successOpen); - const setSuccessOpen = useAlertStore((state) => state.setSuccessOpen); + const removeFromTempNotificationList = useAlertStore( + (state) => state.removeFromTempNotificationList + ); + const tempNotificationList = useAlertStore( + (state) => state.tempNotificationList + ); const [fetchError, setFetchError] = useState(false); const isLoading = useFlowsManagerStore((state) => state.isLoading); - // Initialize state variable for the list of alerts - const [alertsList, setAlertsList] = useState< - Array<{ - type: string; - data: { title: string; list?: Array; link?: string }; - id: string; - }> - >([]); - - // Use effect hook to update alertsList when a new alert is added - useEffect(() => { - // If there is an error alert open with data, add it to the alertsList - if (errorOpen && errorData) { - if ( - alertsList.length > 0 && - JSON.stringify(alertsList[alertsList.length - 1].data) === - JSON.stringify(errorData) - ) { - return; - } - setErrorOpen(false); - 
setAlertsList((old) => { - let newAlertsList = [ - ...old, - { type: "error", data: _.cloneDeep(errorData), id: _.uniqueId() }, - ]; - return newAlertsList; - }); - } - // If there is a notice alert open with data, add it to the alertsList - else if (noticeOpen && noticeData) { - if ( - alertsList.length > 0 && - JSON.stringify(alertsList[alertsList.length - 1].data) === - JSON.stringify(noticeData) - ) { - return; - } - setNoticeOpen(false); - setAlertsList((old) => { - let newAlertsList = [ - ...old, - { type: "notice", data: _.cloneDeep(noticeData), id: _.uniqueId() }, - ]; - return newAlertsList; - }); - } - // If there is a success alert open with data, add it to the alertsList - else if (successOpen && successData) { - if ( - alertsList.length > 0 && - JSON.stringify(alertsList[alertsList.length - 1].data) === - JSON.stringify(successData) - ) { - return; - } - setSuccessOpen(false); - setAlertsList((old) => { - let newAlertsList = [ - ...old, - { type: "success", data: _.cloneDeep(successData), id: _.uniqueId() }, - ]; - return newAlertsList; - }); - } - }, [ - _, - errorData, - errorOpen, - noticeData, - noticeOpen, - setErrorOpen, - setNoticeOpen, - setSuccessOpen, - successData, - successOpen, - ]); - const removeAlert = (id: string) => { - setAlertsList((prevAlertsList) => - prevAlertsList.filter((alert) => alert.id !== id) - ); + removeFromTempNotificationList(id); }; const { isAuthenticated } = useContext(AuthContext); @@ -126,7 +44,13 @@ export default function App() { const getTypes = useTypesStore((state) => state.getTypes); const refreshVersion = useDarkStore((state) => state.refreshVersion); const refreshStars = useDarkStore((state) => state.refreshStars); + const setGlobalVariables = useGlobalVariablesStore( + (state) => state.setGlobalVariables + ); const checkHasStore = useStoreStore((state) => state.checkHasStore); + const navigate = useNavigate(); + + const [isLoadingHealth, setIsLoadingHealth] = useState(false); useEffect(() => { 
refreshStars(); @@ -138,22 +62,26 @@ export default function App() { getTypes().then(() => { refreshFlows(); }); + getGlobalVariables().then((res) => { + setGlobalVariables(res); + }); checkHasStore(); fetchApiData(); } }, [isAuthenticated]); useEffect(() => { + checkApplicationHealth(); // Timer to call getHealth every 5 seconds const timer = setInterval(() => { getHealth() .then(() => { - if (fetchError) setFetchError(false); + onHealthCheck(); }) .catch(() => { setFetchError(true); }); - }, 20000); + }, 20000); // 20 seconds // Clean up the timer on component unmount return () => { @@ -161,6 +89,30 @@ export default function App() { }; }, []); + const checkApplicationHealth = () => { + setIsLoadingHealth(true); + getHealth() + .then(() => { + onHealthCheck(); + }) + .catch(() => { + setFetchError(true); + }); + + setTimeout(() => { + setIsLoadingHealth(false); + }, 2000); + }; + + const onHealthCheck = () => { + setFetchError(false); + //This condition is necessary to avoid infinite loop on starter page when the application is not healthy + if (isLoading === true && window.location.pathname === "/") { + navigate("/flows"); + window.location.reload(); + } + }; + return ( //need parent component with width and height
@@ -170,51 +122,71 @@ export default function App() { }} FallbackComponent={CrashErrorComponent} > - {fetchError ? ( - - ) : isLoading ? ( -
- -
- ) : ( - <> - - - )} + <> + { + { + checkApplicationHealth(); + }} + isLoadingHealth={isLoadingHealth} + > + } + + {isLoading ? ( +
+ +
+ ) : ( + <> + + + )} +
-
- {alertsList.map((alert) => ( -
- {alert.type === "error" ? ( - - ) : alert.type === "notice" ? ( - - ) : ( - - )} -
- ))} +
+
+ {tempNotificationList.map((alert) => ( +
+ {alert.type === "error" && ( + + )} +
+ ))} +
+
+ {tempNotificationList.map((alert) => ( +
+ {alert.type === "notice" ? ( + + ) : ( + alert.type === "success" && ( + + ) + )} +
+ ))} +
); diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 533d60842..6a5029101 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -6,9 +6,9 @@ import CodeAreaComponent from "../../../../components/codeAreaComponent"; import DictComponent from "../../../../components/dictComponent"; import Dropdown from "../../../../components/dropdownComponent"; import FloatComponent from "../../../../components/floatComponent"; -import IconComponent from "../../../../components/genericIconComponent"; -import InputComponent from "../../../../components/inputComponent"; +import { default as IconComponent } from "../../../../components/genericIconComponent"; import InputFileComponent from "../../../../components/inputFileComponent"; +import InputGlobalComponent from "../../../../components/inputGlobalComponent"; import InputListComponent from "../../../../components/inputListComponent"; import IntComponent from "../../../../components/intComponent"; import KeypairListComponent from "../../../../components/keypairListComponent"; @@ -16,18 +16,27 @@ import PromptAreaComponent from "../../../../components/promptComponent"; import TextAreaComponent from "../../../../components/textAreaComponent"; import ToggleShadComponent from "../../../../components/toggleShadComponent"; import { Button } from "../../../../components/ui/button"; +import { RefreshButton } from "../../../../components/ui/refreshButton"; import { + INPUT_HANDLER_HOVER, LANGFLOW_SUPPORTED_TYPES, + OUTPUT_HANDLER_HOVER, TOOLTIP_EMPTY, } from "../../../../constants/constants"; -import { postCustomComponentUpdate } from "../../../../controllers/API"; import useAlertStore from "../../../../stores/alertStore"; import useFlowStore from "../../../../stores/flowStore"; 
import useFlowsManagerStore from "../../../../stores/flowsManagerStore"; import { useTypesStore } from "../../../../stores/typesStore"; -import { APIClassType } from "../../../../types/api"; +import { + APIClassType, + ResponseErrorDetailAPI, + ResponseErrorTypeAPI, +} from "../../../../types/api"; import { ParameterComponentType } from "../../../../types/components"; -import { NodeDataType } from "../../../../types/flow"; +import { + debouncedHandleUpdateValues, + handleUpdateValues, +} from "../../../../utils/parameterUtils"; import { convertObjToArray, convertValuesToNumbers, @@ -67,6 +76,7 @@ export default function ParameterComponent({ const edges = useFlowStore((state) => state.edges); const setNode = useFlowStore((state) => state.setNode); + const [isLoading, setIsLoading] = useState(false); const flow = currentFlow?.data?.nodes ?? null; const groupedEdge = useRef(null); @@ -83,32 +93,94 @@ export default function ParameterComponent({ const takeSnapshot = useFlowsManagerStore((state) => state.takeSnapshot); - const handleUpdateValues = async (name: string, data: NodeDataType) => { - const code = data.node?.template["code"]?.value; - if (!code) { - console.error("Code not found in the template"); - return; - } - + const handleRefreshButtonPress = async (name, data) => { + setIsLoading(true); try { - const res = await postCustomComponentUpdate(code, name); - if (res.status === 200 && data.node?.template) { - data.node!.template[name] = res.data.template[name]; + let newTemplate = await handleUpdateValues(name, data); + if (newTemplate) { + setNode(data.id, (oldNode) => { + let newNode = cloneDeep(oldNode); + newNode.data = { + ...newNode.data, + }; + newNode.data.node.template = newTemplate; + return newNode; + }); } - } catch (err) { - setErrorData(err as { title: string; list?: Array }); + } catch (error) { + let responseError = error as ResponseErrorDetailAPI; + + setErrorData({ + title: "Error while updating the Component", + list: 
[responseError.response.data.detail ?? "Unknown error"], + }); } + setIsLoading(false); + renderTooltips(); }; - const handleOnNewValue = ( + useEffect(() => { + async function fetchData() { + if ( + (data.node?.template[name]?.real_time_refresh || + data.node?.template[name]?.refresh_button) && + // options can be undefined but not an empty array + (data.node?.template[name]?.options?.length ?? 0) === 0 + ) { + setIsLoading(true); + try { + let newTemplate = await handleUpdateValues(name, data); + if (newTemplate) { + setNode(data.id, (oldNode) => { + let newNode = cloneDeep(oldNode); + newNode.data = { + ...newNode.data, + }; + newNode.data.node.template = newTemplate; + return newNode; + }); + } + } catch (error) { + let responseError = error as ResponseErrorDetailAPI; + + setErrorData({ + title: "Error while updating the Component", + list: [responseError.response.data.detail ?? "Unknown error"], + }); + } + setIsLoading(false); + renderTooltips(); + } + } + fetchData(); + }, []); + const handleOnNewValue = async ( newValue: string | string[] | boolean | Object[] - ): void => { + ): Promise => { if (data.node!.template[name].value !== newValue) { takeSnapshot(); } + const shouldUpdate = + data.node?.template[name].real_time_refresh && + !data.node?.template[name].refresh_button && + data.node!.template[name].value !== newValue; data.node!.template[name].value = newValue; // necessary to enable ctrl+z inside the input - + let newTemplate; + if (shouldUpdate) { + setIsLoading(true); + try { + newTemplate = await debouncedHandleUpdateValues(name, data); + } catch (error) { + let responseError = error as ResponseErrorTypeAPI; + setErrorData({ + title: "Error while updating the Component", + list: [responseError.response.data.detail.error ?? 
"Unknown error"], + }); + } + setIsLoading(false); + // this de + } setNode(data.id, (oldNode) => { let newNode = cloneDeep(oldNode); @@ -116,7 +188,9 @@ export default function ParameterComponent({ ...newNode.data, }; - newNode.data.node.template[name].value = newValue; + if (data.node?.template[name].real_time_refresh && newTemplate) { + newNode.data.node.template = newTemplate; + } else newNode.data.node.template[name].value = newValue; return newNode; }); @@ -180,11 +254,7 @@ export default function ParameterComponent({ return (
{index === 0 && ( - - {left - ? "Avaliable input components:" - : "Avaliable output components:"} - + {left ? INPUT_HANDLER_HOVER : OUTPUT_HANDLER_HOVER} )} 2 ? item.display_name.split(", ").map((el, index) => ( - + {index === item.display_name.split(", ").length - 1 @@ -231,7 +301,7 @@ export default function ParameterComponent({ {item.type === "" ? "" : " - "} {item.type.split(", ").length > 2 ? item.type.split(", ").map((el, index) => ( - + {index === item.type.split(", ").length - 1 ? el @@ -252,11 +322,14 @@ export default function ParameterComponent({ refHtml.current = {TOOLTIP_EMPTY}; } } + // If optionalHandle is an empty list, then it is not an optional handle + if (optionalHandle && optionalHandle.length === 0) { + optionalHandle = null; + } useEffect(() => { renderTooltips(); }, [tooltipTitle, flow]); - return !showNode ? ( left && LANGFLOW_SUPPORTED_TYPES.has(type ?? "") && !optionalHandle ? ( <> @@ -265,7 +338,7 @@ export default function ParameterComponent({
@@ -291,7 +364,7 @@ export default function ParameterComponent({ !showNode ? "mt-0" : "" )} style={{ - borderColor: color, + borderColor: color ?? nodeColors.unknown, }} onClick={() => { setFilterEdge(groupedEdge.current); @@ -304,24 +377,40 @@ export default function ParameterComponent({ ) : ( - {showNode && ( -
-
- Building... - ) : !validationStatus ? ( - - Build{" "} - {" "} - flow to validate status. - - ) : ( -
- {typeof validationStatus.params === "string" - ? `Duration: ${validationStatus.duration}\n${validationStatus.params}` - .split("\n") - .map((line, index) => ( -
{line}
- )) - : ""} + {STATUS_BUILDING} + ) : !validationStatus ? ( + {STATUS_BUILD} + ) : ( +
+
+ {lastRunTime && ( +
+
{RUN_TIMESTAMP_PREFIX}
+
+ {lastRunTime} +
+
+ )} +
+
+
Duration:
+
+ {validationStatus?.data.duration}
- ) - } - > -
-
-
-
+
+
+ + Output + +
+ {validationString.split("\n").map((line, index) => ( +
+ {line} +
+ ))} +
- -
-
+ ) + } + side="bottom" + > + + )}
@@ -413,6 +671,7 @@ export default function GenericNode({ autoFocus onBlur={() => { setInputDescription(false); + setInputName(false); setNodeDescription(nodeDescription); setNode(data.id, (old) => ({ ...old, @@ -460,7 +719,7 @@ export default function GenericNode({ ? "font-light italic" : "" )} - onDoubleClick={() => { + onDoubleClick={(e) => { setInputDescription(true); takeSnapshot(); }} @@ -475,15 +734,7 @@ export default function GenericNode({ <> {Object.keys(data.node!.template) .filter((templateField) => templateField.charAt(0) !== "_") - .sort((a, b) => { - if (priorityFields.has(a.toLowerCase())) { - return -1; - } else if (priorityFields.has(b.toLowerCase())) { - return 1; - } else { - return a.localeCompare(b); - } - }) + .sort((a, b) => sortFields(a, b, data.node?.field_order ?? [])) .map((templateField: string, idx) => (
{data.node!.template[templateField].show && @@ -500,13 +751,31 @@ export default function GenericNode({ })} data={data} color={ - nodeColors[ - data.node?.template[templateField].type! - ] ?? - nodeColors[ - types[data.node?.template[templateField].type!] - ] ?? - nodeColors.unknown + data.node?.template[templateField].input_types && + data.node?.template[templateField].input_types! + .length > 0 + ? nodeColors[ + data.node?.template[templateField].input_types![ + data.node?.template[templateField] + .input_types!.length - 1 + ] + ] ?? + nodeColors[ + types[ + data.node?.template[templateField] + .input_types![ + data.node?.template[templateField] + .input_types!.length - 1 + ] + ] + ] + : nodeColors[ + data.node?.template[templateField].type! + ] ?? + nodeColors[ + types[data.node?.template[templateField].type!] + ] ?? + nodeColors.unknown } title={getFieldTitle( data.node?.template!, diff --git a/src/frontend/src/alerts/alertDropDown/index.tsx b/src/frontend/src/alerts/alertDropDown/index.tsx index 967d82174..3577a5de6 100644 --- a/src/frontend/src/alerts/alertDropDown/index.tsx +++ b/src/frontend/src/alerts/alertDropDown/index.tsx @@ -1,3 +1,4 @@ +import { Cross2Icon } from "@radix-ui/react-icons"; import { useState } from "react"; import IconComponent from "../../components/genericIconComponent"; import { @@ -5,6 +6,7 @@ import { PopoverContent, PopoverTrigger, } from "../../components/ui/popover"; +import { ZERO_NOTIFICATIONS } from "../../constants/constants"; import useAlertStore from "../../stores/alertStore"; import { AlertDropdownType } from "../../types/alerts"; import SingleAlert from "./components/singleAlertComponent"; @@ -45,15 +47,15 @@ export default function AlertDropdown({ setTimeout(clearNotificationList, 100); }} > - +
@@ -68,7 +70,7 @@ export default function AlertDropdown({ )) ) : (
- No new notifications + {ZERO_NOTIFICATIONS}
)}
diff --git a/src/frontend/src/assets/Gooey Ring-5s-271px.svg b/src/frontend/src/assets/Gooey Ring-5s-271px.svg deleted file mode 100644 index 6c3433420..000000000 --- a/src/frontend/src/assets/Gooey Ring-5s-271px.svg +++ /dev/null @@ -1,40 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/frontend/src/assets/froze-flow.png b/src/frontend/src/assets/froze-flow.png deleted file mode 100644 index 893ce8f85..000000000 Binary files a/src/frontend/src/assets/froze-flow.png and /dev/null differ diff --git a/src/frontend/src/assets/undraw_blog_post_re_fy5x.svg b/src/frontend/src/assets/undraw_blog_post_re_fy5x.svg new file mode 100644 index 000000000..96bcbea23 --- /dev/null +++ b/src/frontend/src/assets/undraw_blog_post_re_fy5x.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_chat_bot_re_e2gj.svg b/src/frontend/src/assets/undraw_chat_bot_re_e2gj.svg new file mode 100644 index 000000000..0d7af9195 --- /dev/null +++ b/src/frontend/src/assets/undraw_chat_bot_re_e2gj.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_cloud_docs_re_xjht.svg b/src/frontend/src/assets/undraw_cloud_docs_re_xjht.svg new file mode 100644 index 000000000..6a0029370 --- /dev/null +++ b/src/frontend/src/assets/undraw_cloud_docs_re_xjht.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_design_components_9vy6.svg b/src/frontend/src/assets/undraw_design_components_9vy6.svg new file mode 100644 index 000000000..c21e134e9 --- /dev/null +++ b/src/frontend/src/assets/undraw_design_components_9vy6.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_mobile_messages_re_yx8w.svg b/src/frontend/src/assets/undraw_mobile_messages_re_yx8w.svg new file mode 100644 index 000000000..f232003d2 --- /dev/null +++ b/src/frontend/src/assets/undraw_mobile_messages_re_yx8w.svg @@ 
-0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_real_time_analytics_re_yliv.svg b/src/frontend/src/assets/undraw_real_time_analytics_re_yliv.svg new file mode 100644 index 000000000..32873b55f --- /dev/null +++ b/src/frontend/src/assets/undraw_real_time_analytics_re_yliv.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_short_bio_re_fmx0.svg b/src/frontend/src/assets/undraw_short_bio_re_fmx0.svg new file mode 100644 index 000000000..95cbf86e6 --- /dev/null +++ b/src/frontend/src/assets/undraw_short_bio_re_fmx0.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_team_collaboration_re_ow29.svg b/src/frontend/src/assets/undraw_team_collaboration_re_ow29.svg new file mode 100644 index 000000000..38bc4b3d5 --- /dev/null +++ b/src/frontend/src/assets/undraw_team_collaboration_re_ow29.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg b/src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg new file mode 100644 index 000000000..c5930b9ea --- /dev/null +++ b/src/frontend/src/assets/undraw_transfer_files_re_a2a9.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/src/frontend/src/components/AccordionComponent/index.tsx b/src/frontend/src/components/AccordionComponent/index.tsx index e00932c76..fd9e42580 100644 --- a/src/frontend/src/components/AccordionComponent/index.tsx +++ b/src/frontend/src/components/AccordionComponent/index.tsx @@ -12,6 +12,7 @@ export default function AccordionComponent({ children, open = [], keyValue, + sideBar, }: AccordionComponentType): JSX.Element { const [value, setValue] = useState( open.length === 0 ? "" : getOpenAccordion() @@ -45,12 +46,14 @@ export default function AccordionComponent({ onClick={() => { handleClick(); }} - className="ml-3" + className={ + sideBar ? 
"w-full bg-muted px-[0.75rem] py-[0.5rem]" : "ml-3" + } > {trigger} - - {children} + +
{children}
diff --git a/src/frontend/src/components/CrashErrorComponent/index.tsx b/src/frontend/src/components/CrashErrorComponent/index.tsx index 0cf376195..57dc83d83 100644 --- a/src/frontend/src/components/CrashErrorComponent/index.tsx +++ b/src/frontend/src/components/CrashErrorComponent/index.tsx @@ -26,7 +26,7 @@ export default function CrashErrorComponent({

- Please report errors with detailed tracebacks on the{" "} + Please report errors with detailed tracebacks on the{" "} {" "} page.

- Thank you! + Thank you!

diff --git a/src/frontend/src/components/DropdownButtonComponent/index.tsx b/src/frontend/src/components/DropdownButtonComponent/index.tsx index 9491336d6..d49076809 100644 --- a/src/frontend/src/components/DropdownButtonComponent/index.tsx +++ b/src/frontend/src/components/DropdownButtonComponent/index.tsx @@ -13,6 +13,8 @@ export default function DropdownButton({ firstButtonName, onFirstBtnClick, options, + plusButton = false, + dropdownOptions = true, }: dropdownButtonPropsType): JSX.Element { const [showOptions, setShowOptions] = useState(false); @@ -23,28 +25,35 @@ export default function DropdownButton({ = ({ onChange={handleDescriptionChange} value={description!} placeholder="Flow description" - className="mt-2 max-h-[100px] font-normal" + className="mt-2 max-h-[100px] resize-none font-normal" rows={3} onDoubleClickCapture={(event) => { handleFocus(event); diff --git a/src/frontend/src/components/LightTooltipComponent/index.tsx b/src/frontend/src/components/LightTooltipComponent/index.tsx deleted file mode 100644 index 08ea76266..000000000 --- a/src/frontend/src/components/LightTooltipComponent/index.tsx +++ /dev/null @@ -1,17 +0,0 @@ -import Tooltip, { TooltipProps, tooltipClasses } from "@mui/material/Tooltip"; -import { styled } from "@mui/material/styles"; - -export const LightTooltip = styled(({ className, ...props }: TooltipProps) => ( - -))(({ theme }) => ({ - [`& .${tooltipClasses.tooltip}`]: { - backgroundColor: theme.palette.common.white, - color: "rgba(0, 0, 0, 0.87)", - boxShadow: theme.shadows[2], - fontSize: 14, - }, - [`& .${tooltipClasses.arrow}:before`]: { - color: theme.palette.common.white, - boxShadow: theme.shadows[1], - }, -})); diff --git a/src/frontend/src/components/LoadingSpinner/index.tsx b/src/frontend/src/components/LoadingSpinner/index.tsx deleted file mode 100644 index ea02a88d6..000000000 --- a/src/frontend/src/components/LoadingSpinner/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -export default function LoadingSpinner({}) { - 
return <>; -} diff --git a/src/frontend/src/components/RadialProgress/index.tsx b/src/frontend/src/components/RadialProgress/index.tsx deleted file mode 100644 index 5c46fd6b8..000000000 --- a/src/frontend/src/components/RadialProgress/index.tsx +++ /dev/null @@ -1,18 +0,0 @@ -import { RadialProgressType } from "../../types/components"; - -export default function RadialProgressComponent({ - value, - color, -}: RadialProgressType): JSX.Element { - const style = { - "--value": value! * 100, - "--size": "1.5rem", - "--thickness": "2px", - } as React.CSSProperties; - - return ( -
- {Math.trunc(value! * 100)}% -
- ); -} diff --git a/src/frontend/src/components/ReactTooltipComponent/index.tsx b/src/frontend/src/components/ReactTooltipComponent/index.tsx deleted file mode 100644 index 18eb67b33..000000000 --- a/src/frontend/src/components/ReactTooltipComponent/index.tsx +++ /dev/null @@ -1,45 +0,0 @@ -"use client"; -import type { FC } from "react"; -import React from "react"; -import { Tooltip as ReactTooltip } from "react-tooltip"; -import "react-tooltip/dist/react-tooltip.css"; -import { TooltipProps } from "../../types/components"; -import { classNames } from "../../utils/utils"; - -const TooltipReact: FC = ({ - selector, - content, - disabled, - position = "top", - children, - htmlContent, - className, - clickable, - delayShow, -}: TooltipProps): JSX.Element => { - return ( -
- {React.cloneElement(children as React.ReactElement, { - "data-tooltip-id": selector, - })} - - {htmlContent && htmlContent} - -
- ); -}; - -export default TooltipReact; diff --git a/src/frontend/src/components/ShadTooltipComponent/index.tsx b/src/frontend/src/components/ShadTooltipComponent/index.tsx index 62d5bc2e4..080935b6f 100644 --- a/src/frontend/src/components/ShadTooltipComponent/index.tsx +++ b/src/frontend/src/components/ShadTooltipComponent/index.tsx @@ -1,4 +1,5 @@ import { ShadToolTipType } from "../../types/components"; +import { cn } from "../../utils/utils"; import { Tooltip, TooltipContent, TooltipTrigger } from "../ui/tooltip"; export default function ShadTooltip({ @@ -14,7 +15,7 @@ export default function ShadTooltip({ {children} - {children} - - ); -} diff --git a/src/frontend/src/components/ViewTriggers/chat/index.tsx b/src/frontend/src/components/ViewTriggers/chat/index.tsx new file mode 100644 index 000000000..9610f22ff --- /dev/null +++ b/src/frontend/src/components/ViewTriggers/chat/index.tsx @@ -0,0 +1,31 @@ +import { Transition } from "@headlessui/react"; + +import IconComponent from "../../genericIconComponent"; + +export default function ChatTrigger({}): JSX.Element { + return ( + + + + ); +} diff --git a/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx new file mode 100644 index 000000000..8dab6420f --- /dev/null +++ b/src/frontend/src/components/addNewVariableButtonComponent/addNewVariableButton.tsx @@ -0,0 +1,99 @@ +import { useState } from "react"; +import { registerGlobalVariable } from "../../controllers/API"; +import BaseModal from "../../modals/baseModal"; +import useAlertStore from "../../stores/alertStore"; +import { useGlobalVariablesStore } from "../../stores/globalVariables"; +import { ResponseErrorDetailAPI } from "../../types/api"; +import ForwardedIconComponent from "../genericIconComponent"; +import InputComponent from "../inputComponent"; +import { Button } from "../ui/button"; +import { Input } from "../ui/input"; +import { 
Label } from "../ui/label"; +import { Textarea } from "../ui/textarea"; + +//TODO IMPLEMENT FORM LOGIC + +export default function AddNewVariableButton({ children }): JSX.Element { + const [key, setKey] = useState(""); + const [value, setValue] = useState(""); + const [type, setType] = useState(""); + const [open, setOpen] = useState(false); + const setErrorData = useAlertStore((state) => state.setErrorData); + const addGlobalVariable = useGlobalVariablesStore( + (state) => state.addGlobalVariable + ); + function handleSaveVariable() { + let data: { name: string; value: string; type?: string } = { + name: key, + type, + value, + }; + registerGlobalVariable(data) + .then((res) => { + const { name, id, type } = res.data; + addGlobalVariable(name, id, type); + setKey(""); + setValue(""); + setType(""); + setOpen(false); + }) + .catch((error) => { + let responseError = error as ResponseErrorDetailAPI; + setErrorData({ + title: "Error creating variable", + list: [responseError.response.data.detail ?? "Unknown error"], + }); + }); + } + return ( + + + Create Variable + + {children} + +
+ + { + setKey(e.target.value); + }} + placeholder="Insert a name for the variable..." + > + + { + setType(e); + }} + selectedOption={type} + password={false} + options={["Generic", "Credential"]} + placeholder="Choose a type for the variable..." + > + + -
- -
- )) - : null} - {flowState?.memory_keys!.map((key, index) => ( -
- - - {key} - -
- {}} - size="small" - disabled={true} - /> -
-
- } - key={index} - keyValue={key} - > -
-
- Source: Memory -
-
- -
- ))} - -
-
-
- -
-
- {chatHistory.length > 0 ? ( - chatHistory.map((chat, index) => ( - - )) - ) : ( -
- - 👋{" "} - - Langflow Chat - - -
-
- - Start a conversation and click the agent's thoughts{" "} - - - {" "} - to inspect the chaining process. - -
-
- )} -
-
-
-
- { - setChatValue(value); - if (flowState && chatKey) { - setFlowState((old: FlowState | undefined) => { - let newFlowState = cloneDeep(old!); - newFlowState.input_keys![chatKey] = value; - return newFlowState; - }); - } - }} - inputRef={ref} - /> -
-
-
-
- - - )} - - ); -} diff --git a/src/frontend/src/modals/genericModal/index.tsx b/src/frontend/src/modals/genericModal/index.tsx index 44173057f..e00462218 100644 --- a/src/frontend/src/modals/genericModal/index.tsx +++ b/src/frontend/src/modals/genericModal/index.tsx @@ -6,6 +6,13 @@ import { Badge } from "../../components/ui/badge"; import { Button } from "../../components/ui/button"; import { Textarea } from "../../components/ui/textarea"; import { + BUG_ALERT, + PROMPT_ERROR_ALERT, + PROMPT_SUCCESS_ALERT, + TEMP_NOTICE_ALERT, +} from "../../constants/alerts_constants"; +import { + EDIT_TEXT_PLACEHOLDER, INVALID_CHARACTERS, MAX_WORDS_HIGHLIGHT, PROMPT_DIALOG_SUBTITLE, @@ -90,13 +97,23 @@ export default function GenericModal({ useEffect(() => { setInputValue(value); }, [value, modalOpen]); - const coloredContent = (inputValue || "") .replace(//g, ">") - .replace(regexHighlight, varHighlightHTML({ name: "$1" })) - .replace(/\n/g, "
"); + .replace(regexHighlight, (match, p1, p2) => { + // Decide which group was matched. If p1 is not undefined, do nothing + // we don't want to change the text. If p2 is not undefined, then we + // have a variable, so we should highlight it. + // ! This will not work with multiline or indented json yet + if (p1 !== undefined) { + return match; + } else if (p2 !== undefined) { + return varHighlightHTML({ name: p2 }); + } + return match; + }) + .replace(/\n/g, "
"); function getClassByNumberLength(): string { let sumOfCaracteres: number = 0; wordsHighlight.forEach((element) => { @@ -133,17 +150,17 @@ export default function GenericModal({ } if (!inputVariables || inputVariables.length === 0) { setNoticeData({ - title: "Your template does not have any variables.", + title: TEMP_NOTICE_ALERT, }); } else { setSuccessData({ - title: "Prompt is ready", + title: PROMPT_SUCCESS_ALERT, }); } } else { setIsEdit(true); setErrorData({ - title: "Something went wrong, please try again", + title: BUG_ALERT, }); } }) @@ -151,8 +168,8 @@ export default function GenericModal({ console.log(error); setIsEdit(true); return setErrorData({ - title: "There is something wrong with this prompt, please review it", - list: [error.toString()], + title: PROMPT_ERROR_ALERT, + list: [error.response.data.detail ?? ""], }); }); } @@ -182,7 +199,7 @@ export default function GenericModal({ {myModalTitle}
- -
+ +
{/* BUG ON THIS ICON */} {title} diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index 4c592a3de..e0b7af4f7 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -1,4 +1,4 @@ -import _ from "lodash"; +import _, { cloneDeep } from "lodash"; import { MouseEvent, useCallback, useEffect, useRef, useState } from "react"; import ReactFlow, { Background, @@ -12,7 +12,12 @@ import ReactFlow, { updateEdge, } from "reactflow"; import GenericNode from "../../../../CustomNodes/GenericNode"; -import Chat from "../../../../components/chatComponent"; +import { + INVALID_SELECTION_ERROR_ALERT, + UPLOAD_ALERT_LIST, + UPLOAD_ERROR_ALERT, + WRONG_FILE_ERROR_ALERT, +} from "../../../../constants/alerts_constants"; import useAlertStore from "../../../../stores/alertStore"; import useFlowStore from "../../../../stores/flowStore"; import useFlowsManagerStore from "../../../../stores/flowsManagerStore"; @@ -25,12 +30,13 @@ import { getNodeId, isValidConnection, reconnectEdges, + scapeJSONParse, + updateIds, validateSelection, } from "../../../../utils/reactflowUtils"; import { getRandomName, isWrappedWithClass } from "../../../../utils/utils"; import ConnectionLineComponent from "../ConnectionLineComponent"; import SelectionMenu from "../SelectionMenuComponent"; -import ExtraSidebar from "../extraSidebarComponent"; const nodeTypes = { genericNode: GenericNode, @@ -51,12 +57,14 @@ export default function Page({ const templates = useTypesStore((state) => state.templates); const setFilterEdge = useFlowStore((state) => state.setFilterEdge); const reactFlowWrapper = useRef(null); + const [showCanvas, setSHowCanvas] = useState( + Object.keys(templates).length > 0 && Object.keys(types).length > 0 + ); const reactFlowInstance = useFlowStore((state) => state.reactFlowInstance); const 
setReactFlowInstance = useFlowStore( (state) => state.setReactFlowInstance ); - const nodes = useFlowStore((state) => state.nodes); const edges = useFlowStore((state) => state.edges); const onNodesChange = useFlowStore((state) => state.onNodesChange); @@ -78,13 +86,101 @@ export default function Page({ (state) => state.setLastCopiedSelection ); const onConnect = useFlowStore((state) => state.onConnect); + const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId); + const setErrorData = useAlertStore((state) => state.setErrorData); + const [selectionMenuVisible, setSelectionMenuVisible] = useState(false); + const edgeUpdateSuccessful = useRef(true); const position = useRef({ x: 0, y: 0 }); const [lastSelection, setLastSelection] = useState(null); + function handleGroupNode() { + takeSnapshot(); + if (validateSelection(lastSelection!, edges).length === 0) { + const clonedNodes = cloneDeep(nodes); + const clonedEdges = cloneDeep(edges); + const clonedSelection = cloneDeep(lastSelection); + updateIds({ nodes: clonedNodes, edges: clonedEdges }, clonedSelection!); + const { newFlow, removedEdges } = generateFlow( + clonedSelection!, + clonedNodes, + clonedEdges, + getRandomName() + ); + const newGroupNode = generateNodeFromFlow(newFlow, getNodeId); + const newEdges = reconnectEdges(newGroupNode, removedEdges); + setNodes([ + ...clonedNodes.filter( + (oldNodes) => + !clonedSelection?.nodes.some( + (selectionNode) => selectionNode.id === oldNodes.id + ) + ), + newGroupNode, + ]); + setEdges([ + ...clonedEdges.filter( + (oldEdge) => + !clonedSelection!.nodes.some( + (selectionNode) => + selectionNode.id === oldEdge.target || + selectionNode.id === oldEdge.source + ) + ), + ...newEdges, + ]); + } else { + setErrorData({ + title: INVALID_SELECTION_ERROR_ALERT, + list: validateSelection(lastSelection!, edges), + }); + } + } + + const setNode = useFlowStore((state) => state.setNode); useEffect(() => { const onKeyDown = (event: KeyboardEvent) => { + const 
selectedNode = nodes.filter((obj) => obj.selected); + if ( + selectionMenuVisible && + (event.ctrlKey || event.metaKey) && + event.key === "g" + ) { + event.preventDefault(); + handleGroupNode(); + } + if ( + (event.ctrlKey || event.metaKey) && + event.key === "p" && + selectedNode.length > 0 + ) { + event.preventDefault(); + setNode(selectedNode[0].id, (old) => ({ + ...old, + data: { + ...old.data, + node: { + ...old.data.node, + frozen: old.data?.node?.frozen ? false : true, + }, + }, + })); + } + if ( + (event.ctrlKey || event.metaKey) && + event.key === "d" && + selectedNode.length > 0 + ) { + event.preventDefault(); + paste( + { nodes: selectedNode, edges: [] }, + { + x: position.current.x, + y: position.current.y, + } + ); + } if (!isWrappedWithClass(event, "noundo")) { if ( (event.key === "y" || (event.key === "z" && event.shiftKey)) && @@ -158,18 +254,10 @@ export default function Page({ document.removeEventListener("keydown", onKeyDown); document.removeEventListener("mousemove", handleMouseMove); }; - }, [lastCopiedSelection, lastSelection, takeSnapshot]); - - const [selectionMenuVisible, setSelectionMenuVisible] = useState(false); - - const setErrorData = useAlertStore((state) => state.setErrorData); - - const edgeUpdateSuccessful = useRef(true); - - const currentFlowId = useFlowsManagerStore((state) => state.currentFlowId); + }, [lastCopiedSelection, lastSelection, takeSnapshot, selectionMenuVisible]); useEffect(() => { - if (reactFlowInstance) { + if (reactFlowInstance && currentFlowId) { resetFlow({ nodes: flow?.data?.nodes ?? [], edges: flow?.data?.edges ?? 
[], @@ -184,6 +272,12 @@ export default function Page({ }; }, []); + useEffect(() => { + setSHowCanvas( + Object.keys(templates).length > 0 && Object.keys(types).length > 0 + ); + }, [templates, types]); + const onConnectMod = useCallback( (params: Connection) => { takeSnapshot(); @@ -262,14 +356,14 @@ export default function Page({ position: position, }).catch((error) => { setErrorData({ - title: "Error uploading file", + title: UPLOAD_ERROR_ALERT, list: [error], }); }); } else { setErrorData({ - title: "Invalid file type", - list: ["Please upload a JSON file"], + title: WRONG_FILE_ERROR_ALERT, + list: [UPLOAD_ALERT_LIST], }); } } @@ -286,6 +380,8 @@ export default function Page({ (oldEdge: Edge, newConnection: Connection) => { if (isValidConnection(newConnection, nodes, edges)) { edgeUpdateSuccessful.current = true; + oldEdge.data.targetHandle = scapeJSONParse(newConnection.targetHandle!); + oldEdge.data.sourceHandle = scapeJSONParse(newConnection.sourceHandle!); setEdges((els) => updateEdge(oldEdge, newConnection, els)); } }, @@ -329,116 +425,74 @@ export default function Page({ setFilterEdge([]); }, []); + function onMouseAction(edge: Edge, color: string): void { + const edges = useFlowStore.getState().edges; + const newEdges = _.cloneDeep(edges); + const style = { stroke: color, transition: "stroke 0.25s" }; + const updatedEdges = newEdges.map((obj) => { + if (obj.id === edge.id) { + return { ...obj, style }; + } + return obj; + }); + setEdges(updatedEdges); + } + return ( -
- {!view && } - {/* Main area */} -
- {/* Primary column */} -
-
- {Object.keys(templates).length > 0 && - Object.keys(types).length > 0 ? ( -
- - - {!view && ( - - )} - { - takeSnapshot(); - if ( - validateSelection(lastSelection!, edges).length === 0 - ) { - const { newFlow, removedEdges } = generateFlow( - lastSelection!, - nodes, - edges, - getRandomName() - ); - const newGroupNode = generateNodeFromFlow( - newFlow, - getNodeId - ); - const newEdges = reconnectEdges( - newGroupNode, - removedEdges - ); - setNodes((oldNodes) => [ - ...oldNodes.filter( - (oldNodes) => - !lastSelection?.nodes.some( - (selectionNode) => - selectionNode.id === oldNodes.id - ) - ), - newGroupNode, - ]); - setEdges((oldEdges) => [ - ...oldEdges.filter( - (oldEdge) => - !lastSelection!.nodes.some( - (selectionNode) => - selectionNode.id === oldEdge.target || - selectionNode.id === oldEdge.source - ) - ), - ...newEdges, - ]); - } else { - setErrorData({ - title: "Invalid selection", - list: validateSelection(lastSelection!, edges), - }); - } - }} - /> - - {!view && } -
- ) : ( - <> +
+ {showCanvas ? ( +
+ + + {!view && ( + )} -
+ { + handleGroupNode(); + }} + /> +
-
+ ) : ( + <> + )}
); } diff --git a/src/frontend/src/pages/FlowPage/components/ParentDisclosureComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/ParentDisclosureComponent/index.tsx new file mode 100644 index 000000000..edc479fce --- /dev/null +++ b/src/frontend/src/pages/FlowPage/components/ParentDisclosureComponent/index.tsx @@ -0,0 +1,41 @@ +import { Disclosure } from "@headlessui/react"; +import IconComponent from "../../../../components/genericIconComponent"; +import { DisclosureComponentType } from "../../../../types/components"; + +export default function ParentDisclosureComponent({ + button: { title, Icon, buttons = [] }, + children, + openDisc, +}: DisclosureComponentType): JSX.Element { + return ( + + {({ open }) => ( + <> +
+ +
+ {title} +
+
+ {buttons.map((btn, index) => ( + + ))} +
+ +
+
+
+
+ {children} + + )} +
+ ); +} diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index f9e904581..f713ca884 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -1,10 +1,11 @@ import { cloneDeep } from "lodash"; +import { LinkIcon, SparklesIcon } from "lucide-react"; import { useEffect, useMemo, useState } from "react"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import IconComponent from "../../../../components/genericIconComponent"; import { Input } from "../../../../components/ui/input"; import { Separator } from "../../../../components/ui/separator"; -import ApiModal from "../../../../modals/ApiModal"; +import { PRIORITY_SIDEBAR_ORDER } from "../../../../constants/constants"; import ExportModal from "../../../../modals/exportModal"; import ShareModal from "../../../../modals/shareModal"; import useAlertStore from "../../../../stores/alertStore"; @@ -24,7 +25,9 @@ import { sensitiveSort, } from "../../../../utils/utils"; import DisclosureComponent from "../DisclosureComponent"; +import ParentDisclosureComponent from "../ParentDisclosureComponent"; import SidebarDraggableComponent from "./sideBarDraggableComponent"; +import { sortKeys } from "./utils"; export default function ExtraSidebar(): JSX.Element { const data = useTypesStore((state) => state.data); @@ -37,7 +40,6 @@ export default function ExtraSidebar(): JSX.Element { const hasApiKey = useStoreStore((state) => state.hasApiKey); const validApiKey = useStoreStore((state) => state.validApiKey); - const isBuilt = useFlowStore((state) => state.isBuilt); const setErrorData = useAlertStore((state) => state.setErrorData); const [dataFilter, setFilterData] = useState(data); const [search, setSearch] = useState(""); @@ -232,73 +234,22 @@ export default function ExtraSidebar(): 
JSX.Element { [] ); + const getIcon = useMemo(() => { + return (SBSectionName: string) => { + if (nodeIconsLucide[SBSectionName]) { + return ( + + ); + } + }; + }, []); + return (
-
- {hasStore && validApiKey && ( - -
{ModalMemo}
-
- )} -
- - - -
- {(!hasApiKey || !validApiKey) && ( - -
{ExportMemo}
-
- )} - -
- {currentFlow && currentFlow.data && ( - - - - )} -
-
-
-
handleBlur()} @@ -314,94 +265,215 @@ export default function ExtraSidebar(): JSX.Element { setSearch(event.target.value); }} /> -
+
{ + if (search) { + setFilterData(data); + setSearch(""); + } + }} + >
- +
+
+
+ Core Components +
+
{Object.keys(dataFilter) - .sort((a, b) => { - if (a.toLowerCase() === "saved_components") { - return -1; - } else if (b.toLowerCase() === "saved_components") { - return 1; - } else if (a.toLowerCase() === "custom_components") { - return -2; - } else if (b.toLowerCase() === "custom_components") { - return 2; - } else { - return a.localeCompare(b); - } - }) + .sort(sortKeys) + .filter((x) => PRIORITY_SIDEBAR_ORDER.includes(x)) .map((SBSectionName: keyof APIObjectType, index) => Object.keys(dataFilter[SBSectionName]).length > 0 ? ( - -
- {Object.keys(dataFilter[SBSectionName]) - .sort((a, b) => - sensitiveSort( - dataFilter[SBSectionName][a].display_name, - dataFilter[SBSectionName][b].display_name + <> + +
+ {Object.keys(dataFilter[SBSectionName]) + .sort((a, b) => + sensitiveSort( + dataFilter[SBSectionName][a].display_name, + dataFilter[SBSectionName][b].display_name + ) ) - ) - .map((SBItemName: string, index) => ( - - - onDragStart(event, { - //split type to remove type in nodes saved with same name removing it's - type: removeCountFromString(SBItemName), - node: dataFilter[SBSectionName][SBItemName], - }) - } - color={nodeColors[SBSectionName]} - itemName={SBItemName} - //convert error to boolean - error={!!dataFilter[SBSectionName][SBItemName].error} - display_name={ + .map((SBItemName: string, index) => ( + - - ))} -
-
+ side="right" + key={index} + > + + onDragStart(event, { + //split type to remove type in nodes saved with same name removing it's + type: removeCountFromString(SBItemName), + node: dataFilter[SBSectionName][SBItemName], + }) + } + color={nodeColors[SBSectionName]} + itemName={SBItemName} + //convert error to boolean + error={ + !!dataFilter[SBSectionName][SBItemName].error + } + display_name={ + dataFilter[SBSectionName][SBItemName].display_name + } + official={ + dataFilter[SBSectionName][SBItemName].official === + false + ? false + : true + } + /> + + ))} +
+
+ ) : (
) - )} + )}{" "} + + {Object.keys(dataFilter) + .sort(sortKeys) + .filter((x) => !PRIORITY_SIDEBAR_ORDER.includes(x)) + .map((SBSectionName: keyof APIObjectType, index) => + Object.keys(dataFilter[SBSectionName]).length > 0 ? ( + <> + +
+ {Object.keys(dataFilter[SBSectionName]) + .sort((a, b) => + sensitiveSort( + dataFilter[SBSectionName][a].display_name, + dataFilter[SBSectionName][b].display_name + ) + ) + .map((SBItemName: string, index) => ( + + + onDragStart(event, { + //split type to remove type in nodes saved with same name removing it's + type: removeCountFromString(SBItemName), + node: dataFilter[SBSectionName][SBItemName], + }) + } + color={nodeColors[SBSectionName]} + itemName={SBItemName} + //convert error to boolean + error={ + !!dataFilter[SBSectionName][SBItemName].error + } + display_name={ + dataFilter[SBSectionName][SBItemName] + .display_name + } + official={ + dataFilter[SBSectionName][SBItemName] + .official === false + ? false + : true + } + /> + + ))} +
+
+ {index === + Object.keys(dataFilter).length - + PRIORITY_SIDEBAR_ORDER.length + + 1 && ( + <> +
+
+ {/* BUG ON THIS ICON */} + + + + Discover More + +
+
+
+ +
+
+
+ + )} + + ) : ( +
+ ) + )} +
); diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx index f33e97a1d..7d468b4ba 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/sideBarDraggableComponent/index.tsx @@ -43,6 +43,7 @@ export const SidebarDraggableComponent = forwardRef( const deleteComponent = useFlowsManagerStore( (state) => state.deleteComponent ); + const version = useDarkStore((state) => state.version); const [cursorPos, setCursorPos] = useState({ x: 0, y: 0 }); const popoverRef = useRef(null); diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx new file mode 100644 index 000000000..ee0509cd0 --- /dev/null +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/utils.tsx @@ -0,0 +1,26 @@ +import { PRIORITY_SIDEBAR_ORDER } from "../../../../constants/constants"; + +export function sortKeys(a: string, b: string) { + // Define the order of specific keys + + const indexA = PRIORITY_SIDEBAR_ORDER.indexOf(a.toLowerCase()); + const indexB = PRIORITY_SIDEBAR_ORDER.indexOf(b.toLowerCase()); + + // Check if both keys are in the predefined order + if (indexA !== -1 && indexB !== -1) { + return indexA - indexB; + } + + // If only 'a' is in the predefined order, it should come first + if (indexA !== -1) { + return -1; + } + + // If only 'b' is in the predefined order, it should come first + if (indexB !== -1) { + return 1; + } + + // If neither 'a' nor 'b' are in the predefined order, sort them alphabetically + return a.localeCompare(b); +} diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx 
b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index 279165a69..8a134f48a 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -1,6 +1,8 @@ -import { cloneDeep } from "lodash"; +import _, { cloneDeep } from "lodash"; import { useEffect, useState } from "react"; +import { useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; +import CodeAreaComponent from "../../../../components/codeAreaComponent"; import IconComponent from "../../../../components/genericIconComponent"; import { Select, @@ -8,13 +10,17 @@ import { SelectItem, SelectTrigger, } from "../../../../components/ui/select-custom"; +import { postCustomComponent } from "../../../../controllers/API"; import ConfirmationModal from "../../../../modals/ConfirmationModal"; import EditNodeModal from "../../../../modals/EditNodeModal"; import ShareModal from "../../../../modals/shareModal"; +import useAlertStore from "../../../../stores/alertStore"; import { useDarkStore } from "../../../../stores/darkStore"; import useFlowStore from "../../../../stores/flowStore"; import useFlowsManagerStore from "../../../../stores/flowsManagerStore"; import { useStoreStore } from "../../../../stores/storeStore"; +import { useTypesStore } from "../../../../stores/typesStore"; +import { APIClassType } from "../../../../types/api"; import { nodeToolbarPropsType } from "../../../../types/components"; import { FlowType } from "../../../../types/flow"; import { @@ -24,14 +30,20 @@ import { updateFlowPosition, } from "../../../../utils/reactflowUtils"; import { classNames } from "../../../../utils/utils"; +import ToolbarSelectItem from "./toolbarSelectItem"; export default function NodeToolbarComponent({ data, deleteNode, - position, setShowNode, numberOfHandles, showNode, + name = "code", + selected, + updateNodeCode, + setShowState, 
+ onCloseAdvancedModal, + isOutdated, }: nodeToolbarPropsType): JSX.Element { const nodeLength = Object.keys(data.node!.template).filter( (templateField) => @@ -48,7 +60,7 @@ export default function NodeToolbarComponent({ data.node.template[templateField].type === "dict" || data.node.template[templateField].type === "NestedDict") ).length; - + const templates = useTypesStore((state) => state.templates); const hasStore = useStoreStore((state) => state.hasStore); const hasApiKey = useStoreStore((state) => state.hasApiKey); const validApiKey = useStoreStore((state) => state.validApiKey); @@ -56,26 +68,44 @@ export default function NodeToolbarComponent({ const isMinimal = numberOfHandles <= 1; const isGroup = data.node?.flow ? true : false; + // const frozen = data.node?.frozen ?? false; const paste = useFlowStore((state) => state.paste); const nodes = useFlowStore((state) => state.nodes); const edges = useFlowStore((state) => state.edges); const setNodes = useFlowStore((state) => state.setNodes); - const setEdges = useFlowStore((state) => state.setEdges); + const setEdges = useFlowStore((state) => state.setEdges); + const unselectAll = useFlowStore((state) => state.unselectAll); const saveComponent = useFlowsManagerStore((state) => state.saveComponent); + const getNodePosition = useFlowStore((state) => state.getNodePosition); const flows = useFlowsManagerStore((state) => state.flows); const version = useDarkStore((state) => state.version); const takeSnapshot = useFlowsManagerStore((state) => state.takeSnapshot); const [showModalAdvanced, setShowModalAdvanced] = useState(false); const [showconfirmShare, setShowconfirmShare] = useState(false); const [showOverrideModal, setShowOverrideModal] = useState(false); - - const [flowComponent, setFlowComponent] = useState(); + const [flowComponent, setFlowComponent] = useState( + createFlowComponent(cloneDeep(data), version) + ); const openInNewTab = (url) => { window.open(url, "_blank", "noreferrer"); }; + useEffect(() => { + 
if (!showModalAdvanced) { + onCloseAdvancedModal!(false); + } + }, [showModalAdvanced]); + const updateNodeInternals = useUpdateNodeInternals(); + + const setLastCopiedSelection = useFlowStore( + (state) => state.setLastCopiedSelection + ); + + const setSuccessData = useAlertStore((state) => state.setSuccessData); + const setNoticeData = useAlertStore((state) => state.setNoticeData); + useEffect(() => { setFlowComponent(createFlowComponent(cloneDeep(data), version)); }, [ @@ -97,6 +127,9 @@ export default function NodeToolbarComponent({ takeSnapshot(); setShowNode(data.showNode ?? true ? false : true); break; + case "Share": + if (hasApiKey || hasStore) setShowconfirmShare(true); + break; case "Download": downloadNode(flowComponent!); break; @@ -112,7 +145,7 @@ export default function NodeToolbarComponent({ takeSnapshot(); expandGroupNode( data.id, - updateFlowPosition(position, data.node?.flow!), + updateFlowPosition(getNodePosition(data.id), data.node?.flow!), data.node!.template, nodes, edges, @@ -122,6 +155,58 @@ export default function NodeToolbarComponent({ break; case "override": setShowOverrideModal(true); + break; + case "delete": + deleteNode(data.id); + break; + case "copy": + const node = nodes.filter((node) => node.id === data.id); + setLastCopiedSelection({ nodes: _.cloneDeep(node), edges: [] }); + break; + case "duplicate": + paste( + { + nodes: [nodes.find((node) => node.id === data.id)!], + edges: [], + }, + { + x: 50, + y: 10, + paneX: nodes.find((node) => node.id === data.id)?.position.x, + paneY: nodes.find((node) => node.id === data.id)?.position.y, + } + ); + break; + case "update": + takeSnapshot(); + // to update we must get the code from the templates in useTypesStore + const thisNodeTemplate = templates[data.type].template; + // if the template does not have a code key + // return + if (!thisNodeTemplate.code) return; + + const currentCode = thisNodeTemplate.code.value; + if (data.node) { + postCustomComponent(currentCode, data.node) + 
.then((apiReturn) => { + const { data } = apiReturn; + if (data && updateNodeCode) { + updateNodeCode(data, currentCode, "code"); + } + }) + .catch((err) => { + console.log(err); + }); + setNode(data.id, (oldNode) => { + let newNode = cloneDeep(oldNode); + newNode.data = { + ...data, + }; + newNode.data.node.template.code.value = currentCode; + return newNode; + }); + } + break; } }; @@ -130,74 +215,243 @@ export default function NodeToolbarComponent({ Object.values(flow).includes(data.node?.display_name!) ); + const setNode = useFlowStore((state) => state.setNode); + + const handleOnNewValue = ( + newValue: string | string[] | boolean | Object[] + ): void => { + if (data.node!.template[name].value !== newValue) { + takeSnapshot(); + } + + data.node!.template[name].value = newValue; // necessary to enable ctrl+z inside the input + + setNode(data.id, (oldNode) => { + let newNode = cloneDeep(oldNode); + + newNode.data = { + ...newNode.data, + }; + + newNode.data.node.template[name].value = newValue; + + return newNode; + }); + }; + + const handleNodeClass = (newNodeClass: APIClassType, code?: string): void => { + if (!data.node) return; + if (data.node!.template[name].value !== code) { + takeSnapshot(); + } + + setNode(data.id, (oldNode) => { + let newNode = cloneDeep(oldNode); + + newNode.data = { + ...newNode.data, + node: newNodeClass, + description: newNodeClass.description ?? data.node!.description, + display_name: newNodeClass.display_name ?? 
data.node!.display_name, + }; + + newNode.data.node.template[name].value = code; + + return newNode; + }); + updateNodeInternals(data.id); + }; + + const [openModal, setOpenModal] = useState(false); + const hasCode = Object.keys(data.node!.template).includes("code"); + + useEffect(() => { + function onKeyDown(event: KeyboardEvent) { + if ( + selected && + (hasApiKey || hasStore) && + (event.ctrlKey || event.metaKey) && + event.key === "u" + ) { + event.preventDefault(); + handleSelectChange("update"); + } + if ( + selected && + isGroup && + (event.ctrlKey || event.metaKey) && + event.key === "g" + ) { + event.preventDefault(); + handleSelectChange("ungroup"); + } + if ( + selected && + (hasApiKey || hasStore) && + (event.ctrlKey || event.metaKey) && + event.shiftKey && + event.key === "S" + ) { + event.preventDefault(); + setShowconfirmShare((state) => !state); + } + + if ( + selected && + (event.ctrlKey || event.metaKey) && + event.shiftKey && + event.key === "Q" + ) { + event.preventDefault(); + if (isMinimal) { + setShowState((show) => !show); + setShowNode(data.showNode ?? true ? 
false : true); + return; + } + setNoticeData({ + title: + "Minimization are only available for nodes with one handle or fewer.", + }); + } + if ( + selected && + (event.ctrlKey || event.metaKey) && + event.shiftKey && + event.key === "U" + ) { + event.preventDefault(); + if (hasCode) return setOpenModal((state) => !state); + setNoticeData({ title: `You can not access ${data.id} code` }); + } + if ( + selected && + !isGroup && + (event.ctrlKey || event.metaKey) && + event.shiftKey && + event.key === "A" + ) { + event.preventDefault(); + setShowModalAdvanced((state) => !state); + } + if (selected && (event.ctrlKey || event.metaKey) && event.key === "s") { + if (isSaved) { + event.preventDefault(); + return setShowOverrideModal((state) => !state); + } + if (hasCode) { + event.preventDefault(); + saveComponent(cloneDeep(data), false); + setSuccessData({ title: `${data.id} saved successfully` }); + } + } + if ( + selected && + (event.ctrlKey || event.metaKey) && + event.shiftKey && + event.key === "D" + ) { + event.preventDefault(); + if (data.node?.documentation) { + return openInNewTab(data.node?.documentation); + } + setNoticeData({ + title: `${data.id} docs is not available at the moment.`, + }); + } + if (selected && (event.ctrlKey || event.metaKey) && event.key === "j") { + event.preventDefault(); + downloadNode(flowComponent!); + } + } + + document.addEventListener("keydown", onKeyDown); + + return () => { + document.removeEventListener("keydown", onKeyDown); + }; + }, [isSaved, showNode, data.showNode, isMinimal]); + return ( <>
- + {hasCode && ( + + + + )} + + - + + + + + {/* - - {hasStore && ( - - - - )} + /> + + */} @@ -311,9 +624,13 @@ export default function NodeToolbarComponent({ index={6} onConfirm={(index, user) => { saveComponent(cloneDeep(data), true); + setSuccessData({ title: `${data.id} successfully overridden!` }); }} onClose={setShowOverrideModal} - onCancel={() => saveComponent(cloneDeep(data), false)} + onCancel={() => { + saveComponent(cloneDeep(data), false); + setSuccessData({ title: "New node successfully saved!" }); + }} > @@ -328,12 +645,36 @@ export default function NodeToolbarComponent({ open={showModalAdvanced} setOpen={setShowModalAdvanced} /> - + {showconfirmShare && ( + + )} + {hasCode && ( +
+ {openModal && ( + + )} +
+ )}
diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/toolbarSelectItem/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/toolbarSelectItem/index.tsx new file mode 100644 index 000000000..88e74623a --- /dev/null +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/toolbarSelectItem/index.tsx @@ -0,0 +1,51 @@ +import ForwardedIconComponent from "../../../../../components/genericIconComponent"; +import { toolbarSelectItemProps } from "../../../../../types/components"; + +export default function ToolbarSelectItem({ + shift, + isMac, + keyboardKey, + value, + icon, + styleObj, + dataTestId, + ping, +}: toolbarSelectItemProps) { + return ( +
+ + {value} + + {isMac ? ( + + ) : ( + + {shift ? "Ctrl" : "Ctrl +"} + + )} + {shift && ( + + )} + + {keyboardKey} + +
+ ); +} diff --git a/src/frontend/src/pages/FlowPage/index.tsx b/src/frontend/src/pages/FlowPage/index.tsx index 9aa88a6e2..a222dddcc 100644 --- a/src/frontend/src/pages/FlowPage/index.tsx +++ b/src/frontend/src/pages/FlowPage/index.tsx @@ -1,11 +1,13 @@ import { useEffect } from "react"; import { useParams } from "react-router-dom"; +import FlowToolbar from "../../components/chatComponent"; import Header from "../../components/headerComponent"; import { useDarkStore } from "../../stores/darkStore"; import useFlowsManagerStore from "../../stores/flowsManagerStore"; import Page from "./components/PageComponent"; +import ExtraSidebar from "./components/extraSidebarComponent"; -export default function FlowPage(): JSX.Element { +export default function FlowPage({ view }: { view?: boolean }): JSX.Element { const setCurrentFlowId = useFlowsManagerStore( (state) => state.setCurrentFlowId ); @@ -17,12 +19,22 @@ export default function FlowPage(): JSX.Element { useEffect(() => { setCurrentFlowId(id!); }, [id]); - return ( <>
- {currentFlow && } + {currentFlow && ( +
+ {!view && } +
+ {/* Primary column */} +
+ +
+ {!view && } +
+
+ )} state.uploadFlow); const removeFlow = useFlowsManagerStore((state) => state.removeFlow); const isLoading = useFlowsManagerStore((state) => state.isLoading); + const setExamples = useFlowsManagerStore((state) => state.setExamples); const flows = useFlowsManagerStore((state) => state.flows); const setSuccessData = useAlertStore((state) => state.setSuccessData); const setErrorData = useAlertStore((state) => state.setErrorData); - const [pageSize, setPageSize] = useState(10); + const [pageSize, setPageSize] = useState(20); const [pageIndex, setPageIndex] = useState(1); const [loadingScreen, setLoadingScreen] = useState(true); @@ -30,7 +36,7 @@ export default function ComponentsComponent({ useEffect(() => { if (isLoading) return; - const all = flows + let all = flows .filter((f) => (f.is_component ?? false) === is_component) .sort((a, b) => { if (a?.updated_at && b?.updated_at) { @@ -76,14 +82,14 @@ export default function ComponentsComponent({ }) .catch((error) => { setErrorData({ - title: "Error uploading file", + title: CONSOLE_ERROR_MSG, list: [error], }); }); } else { setErrorData({ - title: "Invalid file type", - list: ["Please upload a JSON file"], + title: WRONG_FILE_ERROR_ALERT, + list: [UPLOAD_ALERT_LIST], }); } } @@ -91,7 +97,7 @@ export default function ComponentsComponent({ function resetFilter() { setPageIndex(1); - setPageSize(10); + setPageSize(20); } return ( diff --git a/src/frontend/src/pages/MainPage/index.tsx b/src/frontend/src/pages/MainPage/index.tsx index 6e0c5cb55..e7aa678e7 100644 --- a/src/frontend/src/pages/MainPage/index.tsx +++ b/src/frontend/src/pages/MainPage/index.tsx @@ -1,17 +1,21 @@ import { Group, ToyBrick } from "lucide-react"; -import { useEffect } from "react"; +import { useEffect, useState } from "react"; import { Outlet, useLocation, useNavigate } from "react-router-dom"; import DropdownButton from "../../components/DropdownButtonComponent"; import IconComponent from "../../components/genericIconComponent"; import 
PageLayout from "../../components/pageLayout"; import SidebarNav from "../../components/sidebarComponent"; import { Button } from "../../components/ui/button"; -import { USER_PROJECTS_HEADER } from "../../constants/constants"; +import { CONSOLE_ERROR_MSG } from "../../constants/alerts_constants"; +import { + MY_COLLECTION_DESC, + USER_PROJECTS_HEADER, +} from "../../constants/constants"; +import NewFlowModal from "../../modals/NewFlowModal"; import useAlertStore from "../../stores/alertStore"; import useFlowsManagerStore from "../../stores/flowsManagerStore"; import { downloadFlows } from "../../utils/reactflowUtils"; export default function HomePage(): JSX.Element { - const addFlow = useFlowsManagerStore((state) => state.addFlow); const uploadFlow = useFlowsManagerStore((state) => state.uploadFlow); const setCurrentFlowId = useFlowsManagerStore( (state) => state.setCurrentFlowId @@ -21,6 +25,7 @@ export default function HomePage(): JSX.Element { const setErrorData = useAlertStore((state) => state.setErrorData); const location = useLocation(); const pathname = location.pathname; + const [openModal, setOpenModal] = useState(false); const is_component = pathname === "/components"; const dropdownOptions = [ { @@ -40,7 +45,7 @@ export default function HomePage(): JSX.Element { }) .catch((error) => { setErrorData({ - title: "Error uploading file", + title: CONSOLE_ERROR_MSG, list: [error], }); }); @@ -71,7 +76,7 @@ export default function HomePage(): JSX.Element { return (
} @@ -112,6 +115,7 @@ export default function HomePage(): JSX.Element {
+ ); } diff --git a/src/frontend/src/pages/ProfileSettingsPage/index.tsx b/src/frontend/src/pages/ProfileSettingsPage/index.tsx index 6a5bc7b70..a7af7c86a 100644 --- a/src/frontend/src/pages/ProfileSettingsPage/index.tsx +++ b/src/frontend/src/pages/ProfileSettingsPage/index.tsx @@ -6,6 +6,12 @@ import GradientChooserComponent from "../../components/gradientChooserComponent" import Header from "../../components/headerComponent"; import InputComponent from "../../components/inputComponent"; import { Button } from "../../components/ui/button"; +import { + EDIT_PASSWORD_ALERT_LIST, + EDIT_PASSWORD_ERROR_ALERT, + SAVE_ERROR_ALERT, + SAVE_SUCCESS_ALERT, +} from "../../constants/alerts_constants"; import { CONTROL_PATCH_USER_STATE } from "../../constants/constants"; import { AuthContext } from "../../contexts/authContext"; import { resetPassword, updateUser } from "../../controllers/API"; @@ -37,8 +43,8 @@ export default function ProfileSettingsPage(): JSX.Element { async function handlePatchUser() { if (password !== cnfPassword) { setErrorData({ - title: "Error changing password", - list: ["Passwords do not match"], + title: EDIT_PASSWORD_ERROR_ALERT, + list: [EDIT_PASSWORD_ALERT_LIST], }); return; } @@ -54,10 +60,10 @@ export default function ProfileSettingsPage(): JSX.Element { } handleInput({ target: { name: "password", value: "" } }); handleInput({ target: { name: "cnfPassword", value: "" } }); - setSuccessData({ title: "Changes saved successfully!" 
}); + setSuccessData({ title: SAVE_SUCCESS_ALERT }); } catch (error) { setErrorData({ - title: "Error saving changes", + title: SAVE_ERROR_ALERT, list: [(error as any).response.data.detail], }); } diff --git a/src/frontend/src/pages/StorePage/index.tsx b/src/frontend/src/pages/StorePage/index.tsx index d83c8a174..412dd1a68 100644 --- a/src/frontend/src/pages/StorePage/index.tsx +++ b/src/frontend/src/pages/StorePage/index.tsx @@ -20,6 +20,13 @@ import { SelectTrigger, SelectValue, } from "../../components/ui/select"; +import { + APIKEY_ERROR_ALERT, + COMPONENTS_ERROR_ALERT, + INVALID_API_ERROR_ALERT, + NOAPI_ERROR_ALERT, +} from "../../constants/alerts_constants"; +import { STORE_DESC, STORE_TITLE } from "../../constants/constants"; import { AuthContext } from "../../contexts/authContext"; import { getStoreComponents, getStoreTags } from "../../controllers/API"; import StoreApiKeyModal from "../../modals/StoreApiKeyModal"; @@ -62,18 +69,14 @@ export default function StorePage(): JSX.Element { if (!loadingApiKey) { if (!hasApiKey) { setErrorData({ - title: "API Key Error", - list: [ - "You don't have an API Key. Please add one to use the Langflow Store.", - ], + title: APIKEY_ERROR_ALERT, + list: [NOAPI_ERROR_ALERT], }); setLoading(false); } else if (!validApiKey) { setErrorData({ - title: "API Key Error", - list: [ - "Your API Key is not valid. 
Please add a valid API Key to use the Langflow Store.", - ], + title: APIKEY_ERROR_ALERT, + list: [INVALID_API_ERROR_ALERT], }); } } @@ -144,14 +147,14 @@ export default function StorePage(): JSX.Element { } }) .catch((err) => { - if (err.response.status === 403 || err.response.status === 401) { + if (err.response?.status === 403 || err.response?.status === 401) { setValidApiKey(false); } else { setSearchData([]); setTotalRowsCount(0); setLoading(false); setErrorData({ - title: "Error getting components.", + title: COMPONENTS_ERROR_ALERT, list: [err["response"]["data"]["detail"]], }); } @@ -171,8 +174,8 @@ export default function StorePage(): JSX.Element { return ( {StoreApiKeyModal && ( diff --git a/src/frontend/src/pages/deleteAccountPage/index.tsx b/src/frontend/src/pages/deleteAccountPage/index.tsx index af219209b..c82cc1cc7 100644 --- a/src/frontend/src/pages/deleteAccountPage/index.tsx +++ b/src/frontend/src/pages/deleteAccountPage/index.tsx @@ -10,7 +10,6 @@ export default function DeleteAccountPage() { // Implement your account deletion logic here // For example, make an API call to delete the account // Upon successful deletion, you can redirect the user to another page - console.log("Account deleted!"); // Implement the logic to redirect the user after account deletion. // For example, use react-router-dom's useHistory hook. 
}; diff --git a/src/frontend/src/pages/loginPage/index.tsx b/src/frontend/src/pages/loginPage/index.tsx index f7cdbd190..2d9ea59c9 100644 --- a/src/frontend/src/pages/loginPage/index.tsx +++ b/src/frontend/src/pages/loginPage/index.tsx @@ -4,6 +4,7 @@ import { Link, useNavigate } from "react-router-dom"; import InputComponent from "../../components/inputComponent"; import { Button } from "../../components/ui/button"; import { Input } from "../../components/ui/input"; +import { SIGNIN_ERROR_ALERT } from "../../constants/alerts_constants"; import { CONTROL_LOGIN_STATE } from "../../constants/constants"; import { AuthContext } from "../../contexts/authContext"; import { onLogin } from "../../controllers/API"; @@ -42,7 +43,7 @@ export default function LoginPage(): JSX.Element { }) .catch((error) => { setErrorData({ - title: "Error signing in", + title: SIGNIN_ERROR_ALERT, list: [error["response"]["data"]["detail"]], }); }); diff --git a/src/frontend/src/pages/signUpPage/index.tsx b/src/frontend/src/pages/signUpPage/index.tsx index d731eee9e..1cdad3d59 100644 --- a/src/frontend/src/pages/signUpPage/index.tsx +++ b/src/frontend/src/pages/signUpPage/index.tsx @@ -4,6 +4,7 @@ import { Link, useNavigate } from "react-router-dom"; import InputComponent from "../../components/inputComponent"; import { Button } from "../../components/ui/button"; import { Input } from "../../components/ui/input"; +import { SIGNUP_ERROR_ALERT } from "../../constants/alerts_constants"; import { CONTROL_INPUT_STATE, SIGN_UP_SUCCESS, @@ -60,7 +61,7 @@ export default function SignUp(): JSX.Element { }, } = error; setErrorData({ - title: "Error signing up", + title: SIGNUP_ERROR_ALERT, list: [detail], }); return; diff --git a/src/frontend/src/stores/alertStore.ts b/src/frontend/src/stores/alertStore.ts index 63c58e28c..e23a8d4e3 100644 --- a/src/frontend/src/stores/alertStore.ts +++ b/src/frontend/src/stores/alertStore.ts @@ -2,6 +2,7 @@ import { uniqueId } from "lodash"; import { create } from 
"zustand"; import { AlertItemType } from "../types/alerts"; import { AlertStoreType } from "../types/zustand/alert"; +import { customStringify } from "../utils/reactflowUtils"; const pushNotificationList = ( list: AlertItemType[], @@ -13,67 +14,130 @@ const pushNotificationList = ( const useAlertStore = create((set, get) => ({ errorData: { title: "", list: [] }, - errorOpen: false, noticeData: { title: "", link: "" }, - noticeOpen: false, successData: { title: "" }, - successOpen: false, notificationCenter: false, notificationList: [], + tempNotificationList: [], loading: true, setErrorData: (newState: { title: string; list?: Array }) => { if (newState.title && newState.title !== "") { set({ errorData: newState, - errorOpen: true, notificationCenter: true, - notificationList: pushNotificationList(get().notificationList, { - type: "error", - title: newState.title, - list: newState.list, - id: uniqueId(), - }), + notificationList: [ + { + type: "error", + title: newState.title, + list: newState.list, + id: uniqueId(), + }, + ...get().notificationList, + ], }); + const tempList = get().tempNotificationList; + if ( + !tempList.some((item) => { + return ( + customStringify({ + title: item.title, + type: item.type, + list: item.list, + }) === customStringify({ ...newState, type: "error" }) + ); + }) + ) { + set({ + tempNotificationList: [ + { + type: "error", + title: newState.title, + list: newState.list, + id: uniqueId(), + }, + ...get().tempNotificationList, + ], + }); + } } }, - setErrorOpen: (newState: boolean) => { - set({ errorOpen: newState }); - }, setNoticeData: (newState: { title: string; link?: string }) => { if (newState.title && newState.title !== "") { set({ noticeData: newState, - noticeOpen: true, notificationCenter: true, - notificationList: pushNotificationList(get().notificationList, { - type: "notice", - title: newState.title, - link: newState.link, - id: uniqueId(), - }), + notificationList: [ + { + type: "notice", + title: newState.title, + link: 
newState.link, + id: uniqueId(), + }, + ...get().notificationList, + ], }); + const tempList = get().tempNotificationList; + if ( + !tempList.some((item) => { + return ( + customStringify({ + title: item.title, + type: item.type, + link: item.link, + }) === customStringify({ ...newState, type: "notice" }) + ); + }) + ) { + set({ + tempNotificationList: [ + { + type: "notice", + title: newState.title, + link: newState.link, + id: uniqueId(), + }, + ...get().tempNotificationList, + ], + }); + } } }, - setNoticeOpen: (newState: boolean) => { - set({ noticeOpen: newState }); - }, setSuccessData: (newState: { title: string }) => { if (newState.title && newState.title !== "") { set({ successData: newState, - successOpen: true, notificationCenter: true, - notificationList: pushNotificationList(get().notificationList, { - type: "success", - title: newState.title, - id: uniqueId(), - }), + notificationList: [ + { + type: "success", + title: newState.title, + id: uniqueId(), + }, + ...get().notificationList, + ], }); + const tempList = get().tempNotificationList; + if ( + !tempList.some((item) => { + return ( + customStringify({ title: item.title, type: item.type }) === + customStringify({ ...newState, type: "success" }) + ); + }) + ) { + set({ + tempNotificationList: [ + { + type: "success", + title: newState.title, + id: uniqueId(), + }, + ...get().tempNotificationList, + ], + }); + } } }, - setSuccessOpen: (newState: boolean) => { - set({ successOpen: newState }); - }, setNotificationCenter: (newState: boolean) => { set({ notificationCenter: newState }); }, @@ -90,6 +154,16 @@ const useAlertStore = create((set, get) => ({ setLoading: (newState: boolean) => { set({ loading: newState }); }, + clearTempNotificationList: () => { + set({ tempNotificationList: [] }); + }, + removeFromTempNotificationList: (index: string) => { + set({ + tempNotificationList: get().tempNotificationList.filter( + (item) => item.id !== index + ), + }); + }, })); export default useAlertStore; diff 
--git a/src/frontend/src/stores/darkStore.tsx b/src/frontend/src/stores/darkStore.tsx index 6278e59c7..f3ccc2e5c 100644 --- a/src/frontend/src/stores/darkStore.tsx +++ b/src/frontend/src/stores/darkStore.tsx @@ -2,9 +2,11 @@ import { create } from "zustand"; import { getRepoStars, getVersion } from "../controllers/API"; import { DarkStoreType } from "../types/zustand/dark"; -export const useDarkStore = create((set) => ({ +const startedStars = Number(window.localStorage.getItem("githubStars")) ?? 0; + +export const useDarkStore = create((set, get) => ({ dark: JSON.parse(window.localStorage.getItem("isDark")!) ?? false, - stars: 0, + stars: startedStars, version: "", setDark: (dark) => set(() => ({ dark: dark })), refreshVersion: () => { @@ -13,8 +15,23 @@ export const useDarkStore = create((set) => ({ }); }, refreshStars: () => { - getRepoStars("logspace-ai", "langflow").then((res) => { - set(() => ({ stars: res })); - }); + let lastUpdated = window.localStorage.getItem("githubStarsLastUpdated"); + let diff = 0; + // check if lastUpdated actually exists + if (lastUpdated !== null) { + diff = Math.abs(new Date().getTime() - new Date(lastUpdated).getTime()); + } + + // if lastUpdated is null or the difference is greater than 2 hours + if (lastUpdated === null || diff > 7200000) { + getRepoStars("logspace-ai", "langflow").then((res) => { + window.localStorage.setItem("githubStars", res.toString()); + window.localStorage.setItem( + "githubStarsLastUpdated", + new Date().toString() + ); + set(() => ({ stars: res, lastUpdated: new Date() })); + }); + } }, })); diff --git a/src/frontend/src/stores/flowStore.ts b/src/frontend/src/stores/flowStore.ts index cb117196f..bcc8f6309 100644 --- a/src/frontend/src/stores/flowStore.ts +++ b/src/frontend/src/stores/flowStore.ts @@ -1,4 +1,4 @@ -import { cloneDeep } from "lodash"; +import { cloneDeep, zip } from "lodash"; import { Edge, EdgeChange, @@ -9,55 +9,123 @@ import { applyNodeChanges, } from "reactflow"; import { create } from 
"zustand"; +import { + FLOW_BUILD_SUCCESS_ALERT, + MISSED_ERROR_ALERT, +} from "../constants/alerts_constants"; +import { BuildStatus } from "../constants/enums"; +import { getFlowPool } from "../controllers/API"; +import { VertexBuildTypeAPI } from "../types/api"; import { NodeDataType, NodeType, sourceHandleType, targetHandleType, } from "../types/flow"; -import { FlowStoreType } from "../types/zustand/flow"; import { + ChatOutputType, + FlowPoolObjectType, + FlowStoreType, + VertexLayerElementType, + chatInputType, +} from "../types/zustand/flow"; +import { buildVertices } from "../utils/buildUtils"; +import { + checkChatInput, cleanEdges, getHandleId, getNodeId, scapeJSONParse, scapedJSONStringfy, + updateGroupRecursion, + validateNodes, } from "../utils/reactflowUtils"; +import { getInputsAndOutputs } from "../utils/storeUtils"; +import useAlertStore from "./alertStore"; +import { useDarkStore } from "./darkStore"; import useFlowsManagerStore from "./flowsManagerStore"; // this is our useStore hook that we can use in our components to get parts of the store and call actions const useFlowStore = create((set, get) => ({ - sseData: {}, flowState: undefined, + flowBuildStatus: {}, nodes: [], edges: [], isBuilding: false, - isPending: false, - isBuilt: false, + isPending: true, + hasIO: false, reactFlowInstance: null, lastCopiedSelection: null, + flowPool: {}, + inputs: [], + outputs: [], + setFlowPool: (flowPool) => { + set({ flowPool }); + }, + addDataToFlowPool: (data: FlowPoolObjectType, nodeId: string) => { + let newFlowPool = cloneDeep({ ...get().flowPool }); + if (!newFlowPool[nodeId]) newFlowPool[nodeId] = [data]; + else { + newFlowPool[nodeId].push(data); + } + get().setFlowPool(newFlowPool); + }, + getNodePosition: (nodeId: string) => { + const node = get().nodes.find((node) => node.id === nodeId); + return node?.position || { x: 0, y: 0 }; + }, + updateFlowPool: ( + nodeId: string, + data: FlowPoolObjectType | ChatOutputType | chatInputType, + buildId?: 
string + ) => { + let newFlowPool = cloneDeep({ ...get().flowPool }); + if (!newFlowPool[nodeId]) { + return; + } else { + let index = newFlowPool[nodeId].length - 1; + if (buildId) { + index = newFlowPool[nodeId].findIndex((flow) => flow.id === buildId); + } + //check if the data is a flowpool object + if ((data as FlowPoolObjectType).data?.artifacts !== undefined) { + newFlowPool[nodeId][index] = data as FlowPoolObjectType; + } + //update data artifact + else { + newFlowPool[nodeId][index].data.artifacts = data; + } + } + get().setFlowPool(newFlowPool); + }, + CleanFlowPool: () => { + get().setFlowPool({}); + }, setPending: (isPending) => { set({ isPending }); }, resetFlow: ({ nodes, edges, viewport }) => { + const currentFlow = useFlowsManagerStore.getState().currentFlow; + let newEdges = cleanEdges(nodes, edges); + const { inputs, outputs } = getInputsAndOutputs(nodes); set({ nodes, - edges, + edges: newEdges, flowState: undefined, - sseData: {}, - isBuilt: false, + inputs, + outputs, + hasIO: inputs.length > 0 || outputs.length > 0, }); get().reactFlowInstance!.setViewport(viewport); - }, - updateSSEData: (sseData) => { - set((state) => ({ sseData: { ...state.sseData, ...sseData } })); + if (currentFlow) { + getFlowPool({ flowId: currentFlow.id }).then((flowPool) => { + set({ flowPool: flowPool.data.vertex_builds }); + }); + } }, setIsBuilding: (isBuilding) => { set({ isBuilding }); }, - setIsBuilt: (isBuilt) => { - set({ isBuilt }); - }, setFlowState: (flowState) => { const newFlowState = typeof flowState === "function" ? flowState(get().flowState) : flowState; @@ -84,50 +152,53 @@ const useFlowStore = create((set, get) => ({ setNodes: (change) => { let newChange = typeof change === "function" ? 
change(get().nodes) : change; let newEdges = cleanEdges(newChange, get().edges); + const { inputs, outputs } = getInputsAndOutputs(newChange); set({ edges: newEdges, nodes: newChange, flowState: undefined, - isBuilt: false, - sseData: {}, + inputs, + outputs, + hasIO: inputs.length > 0 || outputs.length > 0, }); const flowsManager = useFlowsManagerStore.getState(); - - flowsManager.autoSaveCurrentFlow( - newChange, - newEdges, - get().reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 } - ); + if (!get().isBuilding) { + flowsManager.autoSaveCurrentFlow( + newChange, + newEdges, + get().reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 } + ); + } }, setEdges: (change) => { let newChange = typeof change === "function" ? change(get().edges) : change; - set({ edges: newChange, flowState: undefined, - isBuilt: false, - sseData: {}, }); const flowsManager = useFlowsManagerStore.getState(); - - flowsManager.autoSaveCurrentFlow( - get().nodes, - newChange, - get().reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 } - ); + if (!get().isBuilding) { + flowsManager.autoSaveCurrentFlow( + get().nodes, + newChange, + get().reactFlowInstance?.getViewport() ?? { x: 0, y: 0, zoom: 1 } + ); + } }, setNode: (id: string, change: Node | ((oldState: Node) => Node)) => { let newChange = typeof change === "function" ? change(get().nodes.find((node) => node.id === id)!) 
: change; - get().setNodes((oldNodes) => oldNodes.map((node) => { if (node.id === id) { + if ((node.data as NodeDataType).node?.frozen) { + (newChange.data as NodeDataType).node!.frozen = false; + } return newChange; } return node; @@ -156,6 +227,18 @@ const useFlowStore = create((set, get) => ({ ); }, paste: (selection, position) => { + function updateGroup() {} + + if ( + selection.nodes.some((node) => node.data.type === "ChatInput") && + checkChatInput(get().nodes) + ) { + useAlertStore.getState().setErrorData({ + title: "Error pasting components", + list: ["You can only have one ChatInput component in the flow"], + }); + return; + } let minimumX = Infinity; let minimumY = Infinity; let idsMap = {}; @@ -182,6 +265,8 @@ const useFlowStore = create((set, get) => ({ let newId = getNodeId(node.data.type); idsMap[node.id] = newId; + updateGroupRecursion(node, selection.edges); + // Create a new node object const newNode: NodeType = { id: newId, @@ -235,11 +320,7 @@ const useFlowStore = create((set, get) => ({ id, data: cloneDeep(edge.data), style: { stroke: "#555" }, - className: - targetHandleObject.type === "Text" - ? "stroke-gray-800 " - : "stroke-gray-900 ", - animated: targetHandleObject.type === "Text", + className: "stroke-gray-900 ", selected: false, }, newEdges.map((edge) => ({ ...edge, selected: false })) @@ -277,8 +358,6 @@ const useFlowStore = create((set, get) => ({ nodes: [], edges: [], flowState: undefined, - sseData: {}, - isBuilt: false, getFilterEdge: [], }); }, @@ -287,6 +366,31 @@ const useFlowStore = create((set, get) => ({ }, getFilterEdge: [], onConnect: (connection) => { + const dark = useDarkStore.getState().dark; + // const commonMarkerProps = { + // type: MarkerType.ArrowClosed, + // width: 20, + // height: 20, + // color: dark ? 
"#555555" : "#000000", + // }; + + // const inputTypes = INPUT_TYPES; + // const outputTypes = OUTPUT_TYPES; + + // const findNode = useFlowStore + // .getState() + // .nodes.find( + // (node) => node.id === connection.source || node.id === connection.target + // ); + + // const sourceType = findNode?.data?.type; + // let isIoIn = false; + // let isIoOut = false; + // if (sourceType) { + // isIoIn = inputTypes.has(sourceType); + // isIoOut = outputTypes.has(sourceType); + // } + let newEdges: Edge[] = []; get().setEdges((oldEdges) => { newEdges = addEdge( @@ -297,17 +401,11 @@ const useFlowStore = create((set, get) => ({ sourceHandle: scapeJSONParse(connection.sourceHandle!), }, style: { stroke: "#555" }, - className: - ((scapeJSONParse(connection.targetHandle!) as targetHandleType) - .type === "Text" - ? "stroke-foreground " - : "stroke-foreground ") + " stroke-connection", - animated: - (scapeJSONParse(connection.targetHandle!) as targetHandleType) - .type === "Text", + className: "stroke-foreground stroke-connection", }, oldEdges ); + return newEdges; }); useFlowsManagerStore @@ -329,6 +427,204 @@ const useFlowStore = create((set, get) => ({ }); }); }, + buildFlow: async ({ + startNodeId, + stopNodeId, + input_value, + }: { + startNodeId?: string; + stopNodeId?: string; + input_value?: string; + }) => { + get().setIsBuilding(true); + const currentFlow = useFlowsManagerStore.getState().currentFlow; + const setSuccessData = useAlertStore.getState().setSuccessData; + const setErrorData = useAlertStore.getState().setErrorData; + const setNoticeData = useAlertStore.getState().setNoticeData; + function validateSubgraph(nodes: string[]) { + const errorsObjs = validateNodes( + get().nodes.filter((node) => nodes.includes(node.id)), + get().edges + ); + + const errors = errorsObjs.map((obj) => obj.errors).flat(); + if (errors.length > 0) { + setErrorData({ + title: MISSED_ERROR_ALERT, + list: errors, + }); + get().setIsBuilding(false); + const ids = errorsObjs.map((obj) 
=> obj.id).flat(); + + get().updateBuildStatus(ids, BuildStatus.ERROR); + throw new Error("Invalid nodes"); + } + } + function handleBuildUpdate( + vertexBuildData: VertexBuildTypeAPI, + status: BuildStatus, + runId: string + ) { + if (vertexBuildData && vertexBuildData.inactivated_vertices) { + get().removeFromVerticesBuild(vertexBuildData.inactivated_vertices); + } + + if (vertexBuildData.next_vertices_ids) { + // next_vertices_ids is a list of vertices that are going to be built next + // verticesLayers is a list of list of vertices ids, where each list is a layer of vertices + // we want to add a new layer (next_vertices_ids) to the list of layers (verticesLayers) + // and the values of next_vertices_ids to the list of vertices ids (verticesIds) + + // const nextVertices will be the zip of vertexBuildData.next_vertices_ids and + // vertexBuildData.top_level_vertices + // the VertexLayerElementType as {id: next_vertices_id, layer: top_level_vertex} + const nextVertices: VertexLayerElementType[] = zip( + vertexBuildData.next_vertices_ids, + vertexBuildData.top_level_vertices + ).map(([id, reference]) => ({ id: id!, reference })); + + const newLayers = [ + ...get().verticesBuild!.verticesLayers, + nextVertices, + ]; + const newIds = [ + ...get().verticesBuild!.verticesIds, + ...vertexBuildData.next_vertices_ids, + ]; + get().updateVerticesBuild({ + verticesIds: newIds, + verticesLayers: newLayers, + runId: runId, + verticesToRun: get().verticesBuild!.verticesToRun, + }); + get().updateBuildStatus( + vertexBuildData.top_level_vertices, + BuildStatus.TO_BUILD + ); + } + + get().addDataToFlowPool( + { ...vertexBuildData, buildId: runId }, + vertexBuildData.id + ); + + useFlowStore.getState().updateBuildStatus([vertexBuildData.id], status); + + const verticesIds = get().verticesBuild?.verticesIds; + const newFlowBuildStatus = { ...get().flowBuildStatus }; + // filter out the vertices that are not status + const verticesToUpdate = verticesIds?.filter( + (id) => 
newFlowBuildStatus[id]?.status !== BuildStatus.BUILT + ); + + if (verticesToUpdate) { + useFlowStore.getState().updateBuildStatus(verticesToUpdate, status); + } + } + await buildVertices({ + input_value, + flowId: currentFlow!.id, + startNodeId, + stopNodeId, + onGetOrderSuccess: () => { + setNoticeData({ title: "Running components" }); + }, + onBuildComplete: (allNodesValid) => { + const nodeId = startNodeId || stopNodeId; + if (nodeId && allNodesValid) { + setSuccessData({ + title: `${ + get().nodes.find((node) => node.id === nodeId)?.data.node + ?.display_name + } built successfully`, + }); + } else { + setSuccessData({ title: FLOW_BUILD_SUCCESS_ALERT }); + } + get().setIsBuilding(false); + }, + onBuildUpdate: handleBuildUpdate, + onBuildError: (title: string, list: string[], elementList) => { + const idList = elementList + .map((element) => element.id) + .filter(Boolean) as string[]; + useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILT); + setErrorData({ list, title }); + get().setIsBuilding(false); + }, + onBuildStart: (elementList) => { + const idList = elementList + // reference is the id of the vertex or the id of the parent in a group node + .map((element) => element.reference) + .filter(Boolean) as string[]; + useFlowStore.getState().updateBuildStatus(idList, BuildStatus.BUILDING); + }, + onValidateNodes: validateSubgraph, + }); + get().setIsBuilding(false); + get().revertBuiltStatusFromBuilding(); + }, + getFlow: () => { + return { + nodes: get().nodes, + edges: get().edges, + viewport: get().reactFlowInstance?.getViewport()!, + }; + }, + updateVerticesBuild: ( + vertices: { + verticesIds: string[]; + verticesLayers: VertexLayerElementType[][]; + runId: string; + verticesToRun: string[]; + } | null + ) => { + set({ verticesBuild: vertices }); + }, + verticesBuild: null, + addToVerticesBuild: (vertices: string[]) => { + const verticesBuild = get().verticesBuild; + if (!verticesBuild) return; + set({ + verticesBuild: { + ...verticesBuild, 
+ verticesIds: [...verticesBuild.verticesIds, ...vertices], + }, + }); + }, + removeFromVerticesBuild: (vertices: string[]) => { + const verticesBuild = get().verticesBuild; + if (!verticesBuild) return; + set({ + verticesBuild: { + ...verticesBuild, + verticesIds: get().verticesBuild!.verticesIds.filter( + (vertex) => !vertices.includes(vertex) + ), + }, + }); + }, + updateBuildStatus: (nodeIdList: string[], status: BuildStatus) => { + const newFlowBuildStatus = { ...get().flowBuildStatus }; + nodeIdList.forEach((id) => { + newFlowBuildStatus[id] = { + status, + }; + if (status == BuildStatus.BUILT) { + const timestamp_string = new Date(Date.now()).toLocaleString(); + newFlowBuildStatus[id].timestamp = timestamp_string; + } + }); + set({ flowBuildStatus: newFlowBuildStatus }); + }, + revertBuiltStatusFromBuilding: () => { + const newFlowBuildStatus = { ...get().flowBuildStatus }; + Object.keys(newFlowBuildStatus).forEach((id) => { + if (newFlowBuildStatus[id].status === BuildStatus.BUILDING) { + newFlowBuildStatus[id].status = BuildStatus.BUILT; + } + }); + }, })); export default useFlowStore; diff --git a/src/frontend/src/stores/flowsManagerStore.ts b/src/frontend/src/stores/flowsManagerStore.ts index 4a64b3125..9471889c3 100644 --- a/src/frontend/src/stores/flowsManagerStore.ts +++ b/src/frontend/src/stores/flowsManagerStore.ts @@ -1,7 +1,11 @@ import { AxiosError } from "axios"; -import { cloneDeep } from "lodash"; +import { cloneDeep, debounce } from "lodash"; import { Edge, Node, Viewport, XYPosition } from "reactflow"; import { create } from "zustand"; +import { + SAVE_DEBOUNCE_TIME, + STARTER_FOLDER_NAME, +} from "../constants/constants"; import { deleteFlowFromDatabase, readFlowsFromDatabase, @@ -37,6 +41,10 @@ const past = {}; const future = {}; const useFlowsManagerStore = create((set, get) => ({ + examples: [], + setExamples: (examples: FlowType[]) => { + set({ examples }); + }, currentFlowId: "", setCurrentFlowId: (currentFlowId: string) => { 
set((state) => ({ @@ -52,6 +60,7 @@ const useFlowsManagerStore = create((set, get) => ({ }); }, currentFlow: undefined, + saveLoading: false, isLoading: true, setIsLoading: (isLoading: boolean) => set({ isLoading }), refreshFlows: () => { @@ -61,7 +70,16 @@ const useFlowsManagerStore = create((set, get) => ({ .then((dbData) => { if (dbData) { const { data, flows } = processFlows(dbData, false); - get().setFlows(flows); + get().setExamples( + flows.filter( + (f) => f.folder === STARTER_FOLDER_NAME && !f.user_id + ) + ); + get().setFlows( + flows.filter( + (f) => !(f.folder === STARTER_FOLDER_NAME && !f.user_id) + ) + ); useTypesStore.setState((state) => ({ data: { ...state.data, ["saved_components"]: data }, })); @@ -70,6 +88,7 @@ const useFlowsManagerStore = create((set, get) => ({ } }) .catch((e) => { + set({ isLoading: false }); useAlertStore.getState().setErrorData({ title: "Could not load flows from database", }); @@ -78,22 +97,19 @@ const useFlowsManagerStore = create((set, get) => ({ }); }, autoSaveCurrentFlow: (nodes: Node[], edges: Edge[], viewport: Viewport) => { - // Clear the previous timeout if it exists. - if (saveTimeoutId) { - clearTimeout(saveTimeoutId); + if (get().currentFlow) { + get().saveFlow( + { ...get().currentFlow!, data: { nodes, edges, viewport } }, + true + ); } - - // Set up a new timeout. - saveTimeoutId = setTimeout(() => { - if (get().currentFlow) { - get().saveFlow( - { ...get().currentFlow!, data: { nodes, edges, viewport } }, - true - ); - } - }, 300); // Delay of 300ms. 
}, saveFlow: (flow: FlowType, silent?: boolean) => { + set({ saveLoading: true }); // set saveLoading true immediately + return get().saveFlowDebounce(flow, silent); // call the debounced function directly + }, + saveFlowDebounce: debounce((flow: FlowType, silent?: boolean) => { + set({ saveLoading: true }); return new Promise((resolve, reject) => { updateFlowInDatabase(flow) .then((updatedFlow) => { @@ -115,6 +131,7 @@ const useFlowsManagerStore = create((set, get) => ({ //update tabs state resolve(); + set({ saveLoading: false }); } }) .catch((err) => { @@ -125,7 +142,7 @@ const useFlowsManagerStore = create((set, get) => ({ reject(err); }); }); - }, + }, SAVE_DEBOUNCE_TIME), uploadFlows: () => { return new Promise((resolve) => { const input = document.createElement("input"); diff --git a/src/frontend/src/stores/globalVariables.ts b/src/frontend/src/stores/globalVariables.ts new file mode 100644 index 000000000..e2f3e37df --- /dev/null +++ b/src/frontend/src/stores/globalVariables.ts @@ -0,0 +1,34 @@ +import { create } from "zustand"; +import { GlobalVariablesStore } from "../types/zustand/globalVariables"; + +export const useGlobalVariablesStore = create( + (set, get) => ({ + globalVariablesEntries: [], + globalVariables: {}, + setGlobalVariables: (variables) => { + set({ + globalVariables: variables, + globalVariablesEntries: Object.keys(variables), + }); + }, + addGlobalVariable: (name, id, type) => { + const data = { id, type }; + const newVariables = { ...get().globalVariables, [name]: data }; + set({ + globalVariables: newVariables, + globalVariablesEntries: Object.keys(newVariables), + }); + }, + removeGlobalVariable: (name) => { + const newVariables = { ...get().globalVariables }; + delete newVariables[name]; + set({ + globalVariables: newVariables, + globalVariablesEntries: Object.keys(newVariables), + }); + }, + getVariableId: (name) => { + return get().globalVariables[name]?.id; + }, + }) +); diff --git a/src/frontend/src/stores/shortcuts.ts 
b/src/frontend/src/stores/shortcuts.ts new file mode 100644 index 000000000..798613708 --- /dev/null +++ b/src/frontend/src/stores/shortcuts.ts @@ -0,0 +1,15 @@ +import { create } from "zustand"; +import { shortcutsStoreType } from "../types/store"; + +export const useShortcutsStore = create((set, get) => ({ + openCodeModalWShortcut: false, + handleModalWShortcut: (modal) => { + switch (modal) { + case "code": + set({ + openCodeModalWShortcut: !get().openCodeModalWShortcut, + }); + break; + } + }, +})); diff --git a/src/frontend/src/stores/typesStore.ts b/src/frontend/src/stores/typesStore.ts index 557a3a30d..808a5d646 100644 --- a/src/frontend/src/stores/typesStore.ts +++ b/src/frontend/src/stores/typesStore.ts @@ -27,10 +27,6 @@ export const useTypesStore = create((set, get) => ({ resolve(); }) .catch((error) => { - useAlertStore.getState().setErrorData({ - title: "An error has occurred while fetching types.", - list: ["Please refresh the page."], - }); console.error("An error has occurred while fetching types."); console.log(error); reject(); diff --git a/src/frontend/src/style/applies.css b/src/frontend/src/style/applies.css index e2bcdb7d9..8900938bf 100644 --- a/src/frontend/src/style/applies.css +++ b/src/frontend/src/style/applies.css @@ -69,7 +69,7 @@ @apply flex h-full w-[14.5rem] flex-col overflow-hidden border-r scrollbar-hide; } .side-bar-search-div-placement { - @apply relative mx-auto mb-2 mt-2 flex items-center; + @apply relative mx-auto flex items-center py-3; } .side-bar-components-icon { @apply h-6 w-4 text-ring; @@ -114,7 +114,10 @@ @apply pointer-events-none; } .extra-side-bar-buttons { - @apply relative inline-flex w-full items-center justify-center rounded-md bg-background px-2 py-2 text-foreground shadow-sm ring-1 ring-inset ring-input transition-all duration-500 ease-in-out; + @apply relative inline-flex w-full items-center justify-center rounded-md bg-background px-2 py-2 text-foreground transition-all duration-500 ease-in-out; + } + 
.header-menubar-item { + @apply relative flex cursor-default cursor-pointer select-none items-center rounded-sm px-2 py-1.5 text-sm outline-none transition-colors hover:bg-accent hover:text-accent-foreground data-[disabled]:pointer-events-none data-[disabled]:opacity-50; } .extra-side-bar-buttons:hover { @apply hover:bg-muted; @@ -175,14 +178,24 @@ @apply fixed bottom-4 right-4; } .message-button-icon { - @apply fill-chat-trigger stroke-chat-trigger stroke-1; + @apply fill-medium-indigo stroke-medium-indigo stroke-1; } .disabled-message-button-icon { @apply fill-chat-trigger-disabled stroke-chat-trigger-disabled stroke-1; } + .parent-disclosure-arrangement { + @apply flex w-full select-none items-center justify-between bg-background px-3 py-1; + } .components-disclosure-arrangement { @apply -mt-px flex w-full select-none items-center justify-between border-y border-y-input bg-muted px-3 py-2; } + .components-disclosure-arrangement-child { + /* different color than the non child */ + @apply -mt-px flex w-full select-none items-center justify-between border-y border-y-input bg-muted px-3 py-2; + } + .parent-disclosure-title { + @apply p-2 px-2 text-sm font-medium; + } .components-disclosure-title { @apply flex items-center text-sm text-primary; } @@ -267,7 +280,7 @@ @apply relative flex flex-col justify-center bg-background; } .generic-node-div-title { - @apply flex w-full items-center gap-8 border-b bg-muted p-4; + @apply flex w-full items-center gap-2 border-b bg-muted p-4; } .generic-node-title-arrangement { @apply flex-max-width items-center truncate; @@ -276,33 +289,53 @@ @apply h-10 w-10 rounded p-1; } .generic-node-tooltip-div { - @apply ml-2 truncate; + @apply ml-2 max-w-[220px] truncate; } .generic-node-validation-div { @apply max-h-96 overflow-auto; } .generic-node-status-position { - @apply relative top-[3px] h-5 w-5; + @apply h-5 w-5; } .generic-node-status-animation { @apply hidden h-4 w-4 animate-spin rounded-full bg-ring opacity-0; } 
.generic-node-status { - @apply h-4 w-4 rounded-full opacity-100; + @apply animate-wiggle opacity-100; } .green-status { - @apply generic-node-status bg-status-green; + @apply generic-node-status text-status-green; } + .gray-status { + @apply generic-node-status text-status-gray; + } + .red-status { - @apply generic-node-status bg-status-red; + @apply generic-node-status text-status-red; } .yellow-status { - @apply generic-node-status bg-status-yellow; + @apply generic-node-status text-status-yellow; + } + .inactive-status { + /* what colour for inactive status? + muted-foreground is too strong, maybe use a lighter shade of it? + + */ + @apply border-none ring ring-muted-foreground; + } + .built-invalid-status { + @apply border-none ring ring-[#FF9090]; + } + .built-invalid-status-dark { + @apply border-none ring ring-[#751C1C]; + } + .building-status { + @apply border-none ring; } .status-build-animation { - @apply hidden h-4 w-4 animate-spin rounded-full bg-ring opacity-0; + @apply opacity-0; } .status-div { @apply absolute w-4 duration-200 ease-in-out; @@ -411,7 +444,9 @@ .code-area-external-link:hover { @apply hover:text-accent-foreground; } - + .dropdown-component-disabled { + @apply pointer-events-none cursor-not-allowed; + } .dropdown-component-outline { @apply input-edit-node relative pr-8; } @@ -421,11 +456,17 @@ .dropdown-component-display { @apply block w-full truncate bg-background; } + .dropdown-component-display-disabled { + @apply text-muted-foreground; + } .dropdown-component-arrow { - @apply pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2; + @apply pointer-events-none absolute inset-y-0 flex items-center pr-2; } .dropdown-component-arrow-color { - @apply extra-side-bar-save-disable h-5 w-5; + @apply h-5 w-5 text-accent-foreground; + } + .dropdown-component-arrow-color-disable { + @apply h-5 w-5 text-muted-foreground; } .dropdown-component-options { @apply z-10 mt-1 max-h-60 overflow-auto rounded-md bg-background py-1 text-base 
shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none sm:text-sm; @@ -449,7 +490,7 @@ @apply absolute inset-y-0 right-0 flex items-center pr-4; } .dropdown-component-check-icon { - @apply h-5 w-5 text-black; + @apply h-5 w-5 text-primary; } .edit-flow-arrangement { @@ -467,7 +508,7 @@ @apply flex items-center gap-0.5 rounded-md px-1.5 py-1 text-sm font-medium; } .header-menu-bar-display { - @apply flex max-w-[120px] cursor-pointer items-center gap-2 lg:max-w-[200px]; + @apply flex max-w-[115px] cursor-pointer items-center gap-2 lg:max-w-[145px]; } .header-menu-flow-name { @apply flex-1 truncate; @@ -553,7 +594,7 @@ } .app-div { - @apply fixed bottom-5 left-5 flex flex-col-reverse; + @apply absolute bottom-5 left-5 flex flex-col-reverse; } .chat-input-modal-txtarea { @@ -825,13 +866,13 @@ } .api-modal-tabs { - @apply flex h-full max-w-full flex-col overflow-hidden rounded-md border bg-muted text-center sm:w-[75vw] md:w-[75vw] lg:w-[75vw] xl:w-[76vw] 2xl:w-full; + @apply flex h-full flex-col overflow-hidden rounded-md border bg-muted text-center; } .api-modal-tablist-div { - @apply flex items-center justify-between px-2; + @apply flex items-center justify-between px-2 py-2; } .api-modal-tabs-content { - @apply -mt-1 h-full w-full overflow-hidden px-4 pb-4; + @apply -mt-1 h-full w-full px-4 pb-4; } .api-modal-accordion-display { @apply mt-2 flex h-full w-full; @@ -876,13 +917,13 @@ @apply mr-2 h-5 w-5 rotate-[44deg]; } .form-modal-play-icon { - @apply mx-1 h-5 w-5; + @apply mx-1 h-5 w-5 fill-current; } .form-modal-chat-position { @apply flex-max-width px-2 py-6 pl-4 pr-9; } .form-modal-chatbot-icon { - @apply mb-3 ml-3 mr-6 mt-1; + @apply mb-3 ml-3 mr-6 mt-1 flex flex-col; } .form-modal-chat-image { @apply flex flex-col items-center gap-1; @@ -926,9 +967,6 @@ .form-modal-initial-prompt-btn { @apply mb-2 flex items-center gap-2 rounded-md border border-border bg-background px-4 py-2 text-sm font-semibold shadow-sm; } - .form-modal-iv-box { - @apply 
flex-max-width mt-2 h-[80vh]; - } .form-modal-iv-size { @apply mr-6 flex h-full w-2/6 flex-col justify-start overflow-auto scrollbar-hide; } @@ -1012,7 +1050,7 @@ } .beta-badge-wrapper { - @apply absolute right-0 top-0 h-16 w-16 overflow-hidden rounded-tr-lg; + @apply pointer-events-none absolute right-0 top-0 h-16 w-16 overflow-hidden rounded-tr-lg; } .beta-badge-content { @apply mt-2 w-24 rotate-45 bg-beta-background text-center text-xs font-semibold text-beta-foreground; diff --git a/src/frontend/src/style/index.css b/src/frontend/src/style/index.css index 0185d25b7..e744fb503 100644 --- a/src/frontend/src/style/index.css +++ b/src/frontend/src/style/index.css @@ -27,6 +27,7 @@ --radius: 0.5rem; --ring: 215 20.2% 65.1%; /* hsl(215 20% 65%) */ --round-btn-shadow: #00000063; + --ice: #31a3cc; --error-background: #fef2f2; --error-foreground: #991b1b; @@ -49,6 +50,9 @@ --component-icon: #d8598a; --flow-icon: #2f67d0; + --hover: #F2F4F5; + --disabled-run: #6366f1; + /* Colors that are shared in dark and light mode */ --blur-shared: #151923de; @@ -60,12 +64,16 @@ --chat-send: #059669; --status-green: #4ade80; --status-blue: #2563eb; + --status-gray: #6b7280; --connection: #555; } .dark { --background: 224 35% 7.5%; /* hsl(224 40% 10%) */ --foreground: 213 31% 80%; /* hsl(213 31% 91%) */ + --ice: #60A5FA; + --hover: #1A202E; + --disabled-run: #6366f1; --muted: 223 27% 11%; /* hsl(223 27% 11%) */ --muted-foreground: 215.4 16.3% 56.9%; /* hsl(215 16% 56%) */ diff --git a/src/frontend/src/types/api/index.ts b/src/frontend/src/types/api/index.ts index 46e62376c..faef230a5 100644 --- a/src/frontend/src/types/api/index.ts +++ b/src/frontend/src/types/api/index.ts @@ -18,6 +18,8 @@ export type APIClassType = { template: APITemplateType; display_name: string; icon?: string; + is_input?: boolean; + is_output?: boolean; input_types?: Array; output_types?: Array; custom_fields?: CustomFieldsType; @@ -25,7 +27,9 @@ export type APIClassType = { documentation: string; error?: 
string; official?: boolean; + frozen?: boolean; flow?: FlowType; + field_order?: string[]; [key: string]: | Array | string @@ -51,6 +55,9 @@ export type TemplateVariableType = { input_types?: Array; display_name?: string; name?: string; + real_time_refresh?: boolean; + refresh_button?: boolean; + refresh_button_text?: string; [key: string]: any; }; export type sendAllProps = { @@ -129,3 +136,47 @@ export type Component = { data: Object; tags: [string]; }; + +export type VerticesOrderTypeAPI = { + ids: Array; + vertices_to_run: Array; + run_id: string; +}; + +export type VertexBuildTypeAPI = { + id: string; + inactivated_vertices: Array | null; + next_vertices_ids: Array; + top_level_vertices: Array; + run_id: string; + valid: boolean; + params: string; + data: VertexDataTypeAPI; + timestamp: string; +}; + +// data is the object received by the API +// it has results, artifacts, timedelta, duration +export type VertexDataTypeAPI = { + results: { [key: string]: { [key: string]: string } }; + artifacts: { [key: string]: string }; + timedelta?: number; + duration?: string; +}; + +export type CodeErrorDataTypeAPI = { + error: string | undefined; + traceback: string | undefined; +}; + +// the error above is inside this error.response.data.detail.error +// which comes from a request to the API +// to type the error we need to know the structure of the object + +// error that has a response, that has a data, that has a detail, that has an error +export type ResponseErrorTypeAPI = { + response: { data: { detail: CodeErrorDataTypeAPI } }; +}; +export type ResponseErrorDetailAPI = { + response: { data: { detail: string } }; +}; diff --git a/src/frontend/src/types/chat/index.ts b/src/frontend/src/types/chat/index.ts index 62cb4eb78..e24c6e891 100644 --- a/src/frontend/src/types/chat/index.ts +++ b/src/frontend/src/types/chat/index.ts @@ -8,5 +8,27 @@ export type ChatMessageType = { thought?: string; files?: Array<{ data: string; type: string; data_type: string }>; prompt?: 
string; - chatKey: string; + chatKey?: string; + componentId: string; + stream_url?: string | null; + sender_name?: string; +}; + +export type ChatOutputType = { + message: string; + sender: string; + sender_name: string; + stream_url?: string; +}; + +export type chatInputType = { + result: string; +}; + +export type FlowPoolObjectType = { + timestamp: string; + valid: boolean; + params: any; + data: { artifacts: any; results: any | ChatOutputType | chatInputType }; + id: string; }; diff --git a/src/frontend/src/types/components/index.ts b/src/frontend/src/types/components/index.ts index d78ce867c..469749b88 100644 --- a/src/frontend/src/types/components/index.ts +++ b/src/frontend/src/types/components/index.ts @@ -1,5 +1,6 @@ -import { ReactElement, ReactNode } from "react"; -import { ReactFlowJsonObject, XYPosition } from "reactflow"; +import { ReactElement, ReactNode, SetStateAction } from "react"; +import { ReactFlowJsonObject } from "reactflow"; +import { InputOutput } from "../../constants/enums"; import { APIClassType, APITemplateType, TemplateVariableType } from "../api"; import { ChatMessageType } from "../chat"; import { FlowStyleType, FlowType, NodeDataType, NodeType } from "../flow/index"; @@ -7,9 +8,9 @@ import { sourceHandleType, targetHandleType } from "./../flow/index"; export type InputComponentType = { autoFocus?: boolean; onBlur?: (event: React.FocusEvent) => void; - value: string; + value?: string; disabled?: boolean; - onChange: (value: string) => void; + onChange?: (value: string) => void; password: boolean; required?: boolean; isForm?: boolean; @@ -20,6 +21,13 @@ export type InputComponentType = { className?: string; id?: string; blurOnEnter?: boolean; + optionsIcon?: string; + optionsPlaceholder?: string; + options?: string[]; + optionsButton?: ReactElement; + optionButton?: (option: string) => ReactElement; + selectedOption?: string; + setSelectedOption?: (value: string) => void; }; export type ToggleComponentType = { enabled: boolean; @@ 
-27,14 +35,15 @@ export type ToggleComponentType = { disabled: boolean | undefined; size: "small" | "medium" | "large"; id?: string; + editNode?: boolean; }; export type DropDownComponentType = { + disabled?: boolean; + isLoading?: boolean; value: string; options: string[]; onSelect: (value: string) => void; editNode?: boolean; - apiModal?: boolean; - numberOfOptions?: number; id?: string; }; export type ParameterComponentType = { @@ -61,6 +70,15 @@ export type InputListComponentType = { editNode?: boolean; }; +export type InputGlobalComponentType = { + disabled: boolean; + onChange: (value: string) => void; + setDb: (value: boolean) => void; + name: string; + data: NodeDataType; + editNode?: boolean; +}; + export type KeyPairListComponentType = { value: any; onChange: (value: Object[]) => void; @@ -68,6 +86,7 @@ export type KeyPairListComponentType = { editNode?: boolean; duplicateKey?: boolean; editNodeModal?: boolean; + isList?: boolean; }; export type DictComponentType = { @@ -103,6 +122,7 @@ export type PromptAreaComponentType = { }; export type CodeAreaComponentType = { + setOpenModal?: (bool: boolean) => void; disabled: boolean; onChange: (value: string[] | string) => void; value: string; @@ -112,9 +132,12 @@ export type CodeAreaComponentType = { dynamic?: boolean; id?: string; readonly?: boolean; + open?: boolean; + setOpen?: (open: boolean) => void; }; export type FileComponentType = { + IOInputProps?; disabled: boolean; onChange: (value: string[] | string) => void; value: string; @@ -126,6 +149,7 @@ export type FileComponentType = { export type DisclosureComponentType = { children: ReactNode; openDisc: boolean; + isChild?: boolean; button: { title: string; Icon: React.ElementType; @@ -146,6 +170,7 @@ export type RangeSpecType = { export type IntComponentType = { value: string; disabled?: boolean; + rangeSpec: RangeSpecType; onChange: (value: string) => void; editNode?: boolean; id?: string; @@ -194,6 +219,8 @@ export type AccordionComponentType = { open?: 
string[]; trigger?: string | ReactElement; keyValue?: string; + openDisc?: boolean; + sideBar?: boolean; }; export type Side = "top" | "right" | "bottom" | "left"; @@ -444,7 +471,7 @@ export type chatInputType = { }; lockChat: boolean; noInput: boolean; - sendMessage: () => void; + sendMessage: (count?: number) => void; setChatValue: (value: string) => void; }; @@ -475,10 +502,20 @@ export type fileCardPropsType = { export type nodeToolbarPropsType = { data: NodeDataType; deleteNode: (idx: string) => void; - position: XYPosition; setShowNode: (boolean: any) => void; numberOfHandles: number; showNode: boolean; + name?: string; + openAdvancedModal?: boolean; + onCloseAdvancedModal?: (close: boolean) => void; + selected: boolean; + updateNodeCode?: ( + newNodeClass: APIClassType, + code: string, + name: string + ) => void; + setShowState: (show: boolean | SetStateAction) => void; + isOutdated?: boolean; }; export type parsedDataType = { @@ -506,24 +543,27 @@ export type modalHeaderType = { export type codeAreaModalPropsType = { setValue: (value: string) => void; + setOpenModal?: (bool: boolean) => void; value: string; nodeClass: APIClassType | undefined; setNodeClass: (Class: APIClassType, code?: string) => void | undefined; children: ReactNode; dynamic?: boolean; readonly?: boolean; + open?: boolean; + setOpen?: (open: boolean) => void; }; export type chatMessagePropsType = { chat: ChatMessageType; lockChat: boolean; lastMessage: boolean; -}; - -export type formModalPropsType = { - open: boolean; - setOpen: Function; - flow: FlowType; + setLockChat: (lock: boolean) => void; + updateChat: ( + chat: ChatMessageType, + message: string, + stream_url?: string + ) => void; }; export type genericModalPropsType = { @@ -540,6 +580,18 @@ export type genericModalPropsType = { readonly?: boolean; }; +export type newFlowModalPropsType = { + open: boolean; + setOpen: (open: boolean) => void; +}; + +export type IOModalPropsType = { + children: JSX.Element; + open: boolean; + 
setOpen: (open: boolean) => void; + disable?: boolean; +}; + export type buttonBoxPropsType = { onClick: () => void; title: string; @@ -619,10 +671,11 @@ export type crashComponentPropsType = { export type validationStatusType = { id: string; + data: object | any; params: string; - progress: number; + progress?: number; valid: boolean; - duration: string; + duration?: string; }; export type ApiKey = { @@ -636,10 +689,55 @@ export type ApiKey = { export type fetchErrorComponentType = { message: string; description: string; + openModal?: boolean; + setRetry: () => void; + isLoadingHealth: boolean; }; export type dropdownButtonPropsType = { firstButtonName: string; onFirstBtnClick: () => void; options: Array<{ name: string; onBtnClick: () => void }>; + plusButton?: boolean; + dropdownOptions?: boolean; +}; + +export type IOFieldViewProps = { + type: InputOutput; + fieldType: string; + fieldId: string; + left?: boolean; +}; + +export type UndrawCardComponentProps = { flow: FlowType }; + +export type chatViewProps = { + sendMessage: (count?: number) => void; + chatValue: string; + setChatValue: (value: string) => void; + lockChat: boolean; + setLockChat: (lock: boolean) => void; +}; + +export type IOFileInputProps = { + field: TemplateVariableType; + updateValue: (e: any, type: string) => void; +}; + +export type toolbarSelectItemProps = { + isMac: boolean; + shift: boolean; + keyboardKey: string; + value: string; + icon: string; + styleObj?: { + iconClasses?: string; + commandClasses?: string; + shiftClasses?: string; + ctrlClasses?: string; + keyClasses?: string; + valueClasses?: string; + }; + dataTestId: string; + ping?: boolean; }; diff --git a/src/frontend/src/types/flow/index.ts b/src/frontend/src/types/flow/index.ts index 9f1821a08..8b259186a 100644 --- a/src/frontend/src/types/flow/index.ts +++ b/src/frontend/src/types/flow/index.ts @@ -1,4 +1,5 @@ import { ReactFlowJsonObject, XYPosition } from "reactflow"; +import { BuildStatus } from "../../constants/enums"; 
import { APIClassType } from "../api/index"; export type FlowType = { @@ -7,12 +8,17 @@ export type FlowType = { data: ReactFlowJsonObject | null; description: string; style?: FlowStyleType; - is_component: boolean; - parent?: string; - date_created?: string; - updated_at?: string; + is_component?: boolean; last_tested_version?: string; + updated_at?: string; + date_created?: string; + parent?: string; + folder?: string; + user_id?: string; + icon?: string; + icon_bg_color?: string; }; + export type NodeType = { id: string; type?: string; @@ -27,6 +33,7 @@ export type NodeDataType = { node?: APIClassType; id: string; output_types?: string[]; + buildStatus?: BuildStatus; }; // FlowStyleType is the type of the style object that is used to style the // Flow card with an emoji and a color. diff --git a/src/frontend/src/types/store/index.ts b/src/frontend/src/types/store/index.ts index 331e16e30..b116bbc3d 100644 --- a/src/frontend/src/types/store/index.ts +++ b/src/frontend/src/types/store/index.ts @@ -18,3 +18,8 @@ export type StoreComponentResponse = { authorized: boolean; results: storeComponent[]; }; + +export type shortcutsStoreType = { + openCodeModalWShortcut: boolean; + handleModalWShortcut: (str: string) => void; +}; diff --git a/src/frontend/src/types/zustand/alert/index.ts b/src/frontend/src/types/zustand/alert/index.ts index 1e273cc4e..7b2837517 100644 --- a/src/frontend/src/types/zustand/alert/index.ts +++ b/src/frontend/src/types/zustand/alert/index.ts @@ -3,19 +3,16 @@ import { AlertItemType } from "../../alerts"; export type AlertStoreType = { errorData: { title: string; list?: Array }; setErrorData: (newState: { title: string; list?: Array }) => void; - errorOpen: boolean; - setErrorOpen: (newState: boolean) => void; noticeData: { title: string; link?: string }; setNoticeData: (newState: { title: string; link?: string }) => void; - noticeOpen: boolean; - setNoticeOpen: (newState: boolean) => void; successData: { title: string }; setSuccessData: 
(newState: { title: string }) => void; - successOpen: boolean; - setSuccessOpen: (newState: boolean) => void; notificationCenter: boolean; setNotificationCenter: (newState: boolean) => void; notificationList: Array; + tempNotificationList: Array; + clearTempNotificationList: () => void; + removeFromTempNotificationList: (index: string) => void; clearNotificationList: () => void; removeFromNotificationList: (index: string) => void; loading: boolean; diff --git a/src/frontend/src/types/zustand/flow/index.ts b/src/frontend/src/types/zustand/flow/index.ts index 166f6220f..d88504909 100644 --- a/src/frontend/src/types/zustand/flow/index.ts +++ b/src/frontend/src/types/zustand/flow/index.ts @@ -7,11 +7,51 @@ import { ReactFlowInstance, Viewport, } from "reactflow"; +import { BuildStatus } from "../../../constants/enums"; import { FlowState } from "../../tabs"; +export type chatInputType = { + result: string; +}; + +export type ChatOutputType = { + message: string; + sender: string; + sender_name: string; + stream_url?: string; +}; + +export type FlowPoolObjectType = { + timestamp: string; + valid: boolean; + params: any; + data: { + artifacts: any | ChatOutputType | chatInputType; + results: any | ChatOutputType | chatInputType; + }; + duration?: string; + progress?: number; + id: string; + buildId: string; +}; + +export type VertexLayerElementType = { + id: string; + reference?: string; +}; + +export type FlowPoolType = { + [key: string]: Array; +}; + export type FlowStoreType = { - updateSSEData: (sseData: object) => void; - sseData: object; + flowPool: FlowPoolType; + inputs: Array<{ type: string; id: string; displayName: string }>; + outputs: Array<{ type: string; id: string; displayName: string }>; + hasIO: boolean; + setFlowPool: (flowPool: FlowPoolType) => void; + addDataToFlowPool: (data: FlowPoolObjectType, nodeId: string) => void; + CleanFlowPool: () => void; isBuilding: boolean; isPending: boolean; setIsBuilding: (isBuilding: boolean) => void; @@ -49,11 +89,47 
@@ export type FlowStoreType = { newSelection: { nodes: any; edges: any } | null, isCrop?: boolean ) => void; - isBuilt: boolean; - setIsBuilt: (isBuilt: boolean) => void; cleanFlow: () => void; setFilterEdge: (newState) => void; getFilterEdge: any[]; onConnect: (connection: Connection) => void; unselectAll: () => void; + buildFlow: ({ + startNodeId, + stopNodeId, + input_value, + }: { + nodeId?: string; + startNodeId?: string; + stopNodeId?: string; + input_value?: string; + }) => Promise; + getFlow: () => { nodes: Node[]; edges: Edge[]; viewport: Viewport }; + updateVerticesBuild: ( + vertices: { + verticesIds: string[]; + verticesLayers: VertexLayerElementType[][]; + runId: string; + verticesToRun: string[]; + } | null + ) => void; + addToVerticesBuild: (vertices: string[]) => void; + removeFromVerticesBuild: (vertices: string[]) => void; + verticesBuild: { + verticesIds: string[]; + verticesLayers: VertexLayerElementType[][]; + runId: string; + verticesToRun: string[]; + } | null; + updateBuildStatus: (nodeId: string[], status: BuildStatus) => void; + revertBuiltStatusFromBuilding: () => void; + flowBuildStatus: { + [key: string]: { status: BuildStatus; timestamp?: string }; + }; + updateFlowPool: ( + nodeId: string, + data: FlowPoolObjectType | ChatOutputType | chatInputType, + buildId?: string + ) => void; + getNodePosition: (nodeId: string) => { x: number; y: number }; }; diff --git a/src/frontend/src/types/zustand/flowIOStore/index.ts b/src/frontend/src/types/zustand/flowIOStore/index.ts new file mode 100644 index 000000000..65833ced9 --- /dev/null +++ b/src/frontend/src/types/zustand/flowIOStore/index.ts @@ -0,0 +1,21 @@ +export type chatInputType = { + result: string; +}; + +export type ChatOutputType = { + message: string; + sender: string; + sender_name: string; +}; + +export type FlowPoolObjectType = { + timestamp: string; + valid: boolean; + params: any; + data: { artifacts: any; results: any | ChatOutputType | chatInputType }; + id: string; +}; + 
+export type FlowPoolType = { + [key: string]: Array; +}; diff --git a/src/frontend/src/types/zustand/flowsManager/index.ts b/src/frontend/src/types/zustand/flowsManager/index.ts index 2a20c20d8..817ddd3d1 100644 --- a/src/frontend/src/types/zustand/flowsManager/index.ts +++ b/src/frontend/src/types/zustand/flowsManager/index.ts @@ -7,10 +7,15 @@ export type FlowsManagerStoreType = { currentFlow: FlowType | undefined; currentFlowId: string; setCurrentFlowId: (currentFlowId: string) => void; + saveLoading: boolean; isLoading: boolean; setIsLoading: (isLoading: boolean) => void; refreshFlows: () => Promise; - saveFlow: (flow: FlowType, silent?: boolean) => Promise; + saveFlow: (flow: FlowType, silent?: boolean) => Promise | undefined; + saveFlowDebounce: ( + flow: FlowType, + silent?: boolean + ) => Promise | undefined; autoSaveCurrentFlow: ( nodes: Node[], edges: Edge[], @@ -43,6 +48,8 @@ export type FlowsManagerStoreType = { undo: () => void; redo: () => void; takeSnapshot: () => void; + examples: Array; + setExamples: (examples: FlowType[]) => void; }; export type UseUndoRedoOptions = { diff --git a/src/frontend/src/types/zustand/globalVariables/index.ts b/src/frontend/src/types/zustand/globalVariables/index.ts new file mode 100644 index 000000000..3e651179e --- /dev/null +++ b/src/frontend/src/types/zustand/globalVariables/index.ts @@ -0,0 +1,10 @@ +export type GlobalVariablesStore = { + globalVariablesEntries: Array; + globalVariables: { [name: string]: { id: string; type?: string } }; + setGlobalVariables: (variables: { + [name: string]: { id: string; type?: string }; + }) => void; + addGlobalVariable: (name: string, id: string, type?: string) => void; + removeGlobalVariable: (name: string) => void; + getVariableId: (name: string) => string | undefined; +}; diff --git a/src/frontend/src/utils/buildUtils.ts b/src/frontend/src/utils/buildUtils.ts new file mode 100644 index 000000000..bc2cc62e7 --- /dev/null +++ b/src/frontend/src/utils/buildUtils.ts @@ -0,0 
+1,257 @@ +import { AxiosError } from "axios"; +import { BuildStatus } from "../constants/enums"; +import { getVerticesOrder, postBuildVertex } from "../controllers/API"; +import useAlertStore from "../stores/alertStore"; +import useFlowStore from "../stores/flowStore"; +import { VertexBuildTypeAPI } from "../types/api"; +import { VertexLayerElementType } from "../types/zustand/flow"; + +type BuildVerticesParams = { + flowId: string; // Assuming FlowType is the type for your flow + input_value?: any; // Replace any with the actual type if it's not any + startNodeId?: string | null; // Assuming nodeId is of type string, and it's optional + stopNodeId?: string | null; // Assuming nodeId is of type string, and it's optional + onGetOrderSuccess?: () => void; + onBuildUpdate?: ( + data: VertexBuildTypeAPI, + status: BuildStatus, + buildId: string + ) => void; // Replace any with the actual type if it's not any + onBuildComplete?: (allNodesValid: boolean) => void; + onBuildError?: (title, list, idList: VertexLayerElementType[]) => void; + onBuildStart?: (idList: VertexLayerElementType[]) => void; + onValidateNodes?: (nodes: string[]) => void; +}; + +function getInactiveVertexData(vertexId: string): VertexBuildTypeAPI { + // Build VertexBuildTypeAPI + let inactiveData = { + results: {}, + artifacts: { repr: "Inactive" }, + }; + let inactiveVertexData = { + id: vertexId, + data: inactiveData, + params: "Inactive", + inactivated_vertices: null, + run_id: "", + next_vertices_ids: [], + top_level_vertices: [], + inactive_vertices: null, + valid: false, + timestamp: new Date().toISOString(), + }; + + return inactiveVertexData; +} + +export async function updateVerticesOrder( + flowId: string, + startNodeId?: string | null, + stopNodeId?: string | null +): Promise<{ + verticesLayers: VertexLayerElementType[][]; + verticesIds: string[]; + runId: string; + verticesToRun: string[]; +}> { + return new Promise(async (resolve, reject) => { + const setErrorData = 
useAlertStore.getState().setErrorData; + let orderResponse; + try { + orderResponse = await getVerticesOrder(flowId, startNodeId, stopNodeId); + } catch (error: any) { + setErrorData({ + title: "Oops! Looks like you missed something", + list: [error.response?.data?.detail ?? "Unknown Error"], + }); + useFlowStore.getState().setIsBuilding(false); + throw new Error("Invalid nodes"); + } + // orderResponse.data.ids, + // for each id we need to build the VertexLayerElementType object as + // {id: id, reference: id} + let verticesLayers: Array> = + orderResponse.data.ids.map((id: string) => { + return [{ id: id, reference: id }]; + }); + + const runId = orderResponse.data.run_id; + const verticesToRun = orderResponse.data.vertices_to_run; + + const verticesIds = orderResponse.data.ids; + useFlowStore.getState().updateVerticesBuild({ + verticesLayers, + verticesIds, + runId, + verticesToRun, + }); + resolve({ verticesLayers, verticesIds, runId, verticesToRun }); + }); +} + +export async function buildVertices({ + flowId, + input_value, + startNodeId, + stopNodeId, + onGetOrderSuccess, + onBuildUpdate, + onBuildComplete, + onBuildError, + onBuildStart, + onValidateNodes, +}: BuildVerticesParams) { + let verticesBuild = useFlowStore.getState().verticesBuild; + // if startNodeId and stopNodeId are provided + // something is wrong + if (startNodeId && stopNodeId) { + return; + } + + if (!verticesBuild || startNodeId || stopNodeId) { + let verticesOrderResponse = await updateVerticesOrder( + flowId, + startNodeId, + stopNodeId + ); + if (onValidateNodes) { + try { + onValidateNodes(verticesOrderResponse.verticesToRun); + } catch (e) { + useFlowStore.getState().setIsBuilding(false); + + return; + } + } + if (onGetOrderSuccess) onGetOrderSuccess(); + verticesBuild = useFlowStore.getState().verticesBuild; + } + + const verticesIds = verticesBuild?.verticesIds!; + const verticesLayers = verticesBuild?.verticesLayers!; + const runId = verticesBuild?.runId!; + let stop = false; + + 
useFlowStore.getState().updateBuildStatus(verticesIds, BuildStatus.TO_BUILD); + useFlowStore.getState().setIsBuilding(true); + let currentLayerIndex = 0; // Start with the first layer + // Set each vertex state to building + const buildResults: Array = []; + + // Build each layer + while ( + currentLayerIndex < + (useFlowStore.getState().verticesBuild?.verticesLayers! || []).length + ) { + // Get the current layer + const currentLayer = + useFlowStore.getState().verticesBuild?.verticesLayers![currentLayerIndex]; + // If there are no more layers, we are done + if (!currentLayer) { + if (onBuildComplete) { + const allNodesValid = buildResults.every((result) => result); + onBuildComplete(allNodesValid); + useFlowStore.getState().setIsBuilding(false); + } + return; + } + // If there is a callback for the start of the build, call it + if (onBuildStart) onBuildStart(currentLayer); + // Build each vertex in the current layer + await Promise.all( + currentLayer.map(async (element) => { + // Check if id is in the list of inactive nodes + if ( + !useFlowStore + .getState() + .verticesBuild?.verticesIds.includes(element.id) && + onBuildUpdate + ) { + // If it is, skip building and set the state to inactive + onBuildUpdate( + getInactiveVertexData(element.id), + BuildStatus.INACTIVE, + runId + ); + buildResults.push(false); + return; + } + + // Build the vertex + await buildVertex({ + flowId, + id: element.id, + input_value, + onBuildUpdate: (data: VertexBuildTypeAPI, status: BuildStatus) => { + if (onBuildUpdate) onBuildUpdate(data, status, runId); + }, + onBuildError, + verticesIds, + buildResults, + stopBuild: () => { + stop = true; + }, + }); + if (stop) { + return; + } + }) + ); + // Once the current layer is built, move to the next layer + currentLayerIndex += 1; + + if (stop) { + break; + } + } + if (onBuildComplete) { + const allNodesValid = buildResults.every((result) => result); + onBuildComplete(allNodesValid); + useFlowStore.getState().setIsBuilding(false); + } +} 
+async function buildVertex({ + flowId, + id, + input_value, + onBuildUpdate, + onBuildError, + verticesIds, + buildResults, + stopBuild, +}: { + flowId: string; + id: string; + input_value: string; + onBuildUpdate?: (data: any, status: BuildStatus) => void; + onBuildError?: (title, list, idList: VertexLayerElementType[]) => void; + verticesIds: string[]; + buildResults: boolean[]; + stopBuild: () => void; +}) { + try { + const buildRes = await postBuildVertex(flowId, id, input_value); + + const buildData: VertexBuildTypeAPI = buildRes.data; + if (onBuildUpdate) { + if (!buildData.valid) { + onBuildError!( + "Error Building Component", + [buildData.params], + verticesIds.map((id) => ({ id })) + ); + stopBuild(); + } + onBuildUpdate(buildData, BuildStatus.BUILT); + } + buildResults.push(buildData.valid); + } catch (error) { + onBuildError!( + "Error Building Component", + [(error as AxiosError).response?.data?.detail ?? "Unknown Error"], + verticesIds.map((id) => ({ id })) + ); + stopBuild(); + } +} diff --git a/src/frontend/src/utils/parameterUtils.ts b/src/frontend/src/utils/parameterUtils.ts new file mode 100644 index 000000000..5972c6599 --- /dev/null +++ b/src/frontend/src/utils/parameterUtils.ts @@ -0,0 +1,45 @@ +import { debounce } from "lodash"; +import { SAVE_DEBOUNCE_TIME } from "../constants/constants"; +import { postCustomComponentUpdate } from "../controllers/API"; +import { ResponseErrorTypeAPI } from "../types/api"; +import { NodeDataType } from "../types/flow"; + +export const handleUpdateValues = async (name: string, data: NodeDataType) => { + const code = data.node?.template["code"]?.value; + if (!code) { + console.error("Code not found in the template"); + return; + } + const template = data.node?.template; + if (!template) { + console.error("No template found in the node."); + return; + } + try { + let newTemplate = await postCustomComponentUpdate( + code, + template, + name, + data.node?.template[name]?.value + ) + .then((res) => { + 
console.log("res", res); + if (res.status === 200 && data.node?.template) { + return res.data.template; + } + }) + .catch((error) => { + throw error; + }); + return newTemplate; + } catch (error) { + console.error("Error occurred while updating the node:", error); + let errorType = error as ResponseErrorTypeAPI; + throw errorType; + } +}; + +export const debouncedHandleUpdateValues = debounce( + handleUpdateValues, + SAVE_DEBOUNCE_TIME +); diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index 7d0e2a3ac..4cd19bf7e 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -9,7 +9,10 @@ import { } from "reactflow"; import ShortUniqueId from "short-unique-id"; import { + INPUT_TYPES, LANGFLOW_SUPPORTED_TYPES, + OUTPUT_TYPES, + SUCCESS_BUILD, specialCharsRegex, } from "../constants/constants"; import { downloadFlowsFromDatabase } from "../controllers/API"; @@ -41,6 +44,10 @@ import { } from "./utils"; const uid = new ShortUniqueId({ length: 5 }); +export function checkChatInput(nodes: Node[]) { + return nodes.some((node) => node.data.type === "ChatInput"); +} + export function cleanEdges(nodes: Node[], edges: Edge[]) { let newEdges = cloneDeep(edges); edges.forEach((edge) => { @@ -197,23 +204,33 @@ export const processDataFromFlow = (flow: FlowType, refreshIds = true) => { return data; }; -export function updateIds(newFlow: ReactFlowJsonObject) { +export function updateIds( + { edges, nodes }: { edges: Edge[]; nodes: Node[] }, + selection?: { edges: Edge[]; nodes: Node[] } +) { let idsMap = {}; - - if (newFlow.nodes) - newFlow.nodes.forEach((node: NodeType) => { + const selectionIds = selection?.nodes.map((n) => n.id); + if (nodes) { + nodes.forEach((node: NodeType) => { // Generate a unique node ID - let newId = getNodeId( - node.data.node?.flow ? 
"GroupNode" : node.data.type - ); + let newId = getNodeId(node.data.type); + if (selection && !selectionIds?.includes(node.id)) { + newId = node.id; + } idsMap[node.id] = newId; node.id = newId; node.data.id = newId; // Add the new node to the list of nodes in state }); - - if (newFlow.edges) - newFlow.edges.forEach((edge: Edge) => { + selection?.nodes.forEach((sNode: NodeType) => { + let newId = idsMap[sNode.id]; + sNode.id = newId; + sNode.data.id = newId; + }); + } + const concatedEdges = [...edges, ...(selection?.edges ?? [])]; + if (concatedEdges) + concatedEdges.forEach((edge: Edge) => { edge.source = idsMap[edge.source]; edge.target = idsMap[edge.target]; const sourceHandleObject: sourceHandleType = scapeJSONParse( @@ -257,7 +274,7 @@ export function buildTweaks(flow: FlowType) { export function validateNode(node: NodeType, edges: Edge[]): Array { if (!node.data?.node?.template || !Object.keys(node.data.node.template)) { return [ - "We've noticed a potential issue with a node in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. Thank you for your help!", + "We've noticed a potential issue with a Component in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. 
Thank you for your help!", ]; } @@ -266,6 +283,8 @@ export function validateNode(node: NodeType, edges: Edge[]): Array { node: { template }, } = node.data; + const displayName = node.data.node.display_name; + return Object.keys(template).reduce((errors: Array, t) => { if ( template[t].required && @@ -281,7 +300,9 @@ export function validateNode(node: NodeType, edges: Edge[]): Array { node.id ) ) { - errors.push(`${type} is missing ${getFieldTitle(template, t)}.`); + errors.push( + `${displayName || type} is missing ${getFieldTitle(template, t)}.` + ); } else if ( template[t].type === "dict" && template[t].required && @@ -292,27 +313,40 @@ export function validateNode(node: NodeType, edges: Edge[]): Array { ) { if (hasDuplicateKeys(template[t].value)) errors.push( - `${type} (${getFieldTitle( + `${displayName || type} (${getFieldTitle( template, t )}) contains duplicate keys with the same values.` ); if (hasEmptyKey(template[t].value)) errors.push( - `${type} (${getFieldTitle(template, t)}) field must not be empty.` + `${displayName || type} (${getFieldTitle( + template, + t + )}) field must not be empty.` ); } return errors; }, [] as string[]); } -export function validateNodes(nodes: Node[], edges: Edge[]) { +export function validateNodes( + nodes: Node[], + edges: Edge[] +): // this returns an array of tuples with the node id and the errors +Array<{ id: string; errors: Array }> { if (nodes.length === 0) { return [ - "No nodes found in the flow. Please add at least one node to the flow.", + { + id: "", + errors: [ + "No nodes found in the flow. 
Please add at least one node to the flow.", + ], + }, ]; } - return nodes.flatMap((n: NodeType) => validateNode(n, edges)); + // validateNode(n, edges) returns an array of errors for the node + return nodes.map((n) => ({ id: n.id, errors: validateNode(n, edges) })); } export function updateEdges(edges: Edge[]) { @@ -321,11 +355,7 @@ export function updateEdges(edges: Edge[]) { const targetHandleObject: targetHandleType = scapeJSONParse( edge.targetHandle! ); - edge.className = - (targetHandleObject.type === "Text" - ? "stroke-gray-800 " - : "stroke-gray-900 ") + " stroke-connection"; - edge.animated = targetHandleObject.type === "Text"; + edge.className = "stroke-gray-900 stroke-connection"; }); } @@ -630,10 +660,13 @@ export function generateFlow( } export function reconnectEdges(groupNode: NodeType, excludedEdges: Edge[]) { - let newEdges = cloneDeep(excludedEdges); if (!groupNode.data.node!.flow) return []; + let newEdges = cloneDeep(excludedEdges); const { nodes, edges } = groupNode.data.node!.flow!.data!; const lastNode = findLastNode(groupNode.data.node!.flow!.data!); + newEdges = newEdges.filter( + (e) => !(nodes.some((n) => n.id === e.source) && e.source !== lastNode?.id) + ); newEdges.forEach((edge) => { if (lastNode && edge.source === lastNode.id) { edge.source = groupNode.id; @@ -718,6 +751,7 @@ export function validateSelection( if (selection.edges.length === 0) { selection.edges = edges; } + // get only edges that are connected to the nodes in the selection // first creates a set of all the nodes ids let nodesSet = new Set(selection.nodes.map((n) => n.id)); @@ -733,7 +767,17 @@ export function validateSelection( if (selection.nodes.length < 2) { errorsArray.push("Please select more than one node"); } - + if ( + selection.nodes.some( + (node) => + isInputNode(node.data as NodeDataType) || + isOutputNode(node.data as NodeDataType) + ) + ) { + errorsArray.push( + "Please select only nodes that are not input or output nodes" + ); + } //check if there are 
two or more nodes with free outputs if ( selection.nodes.filter( @@ -769,7 +813,7 @@ function updateGroupNodeTemplate(template: APITemplateType) { template[key].advanced = true; } //prevent code fields from showing on the group node - if (type === "code") { + if (type === "code" && key === "code") { template[key].show = false; } }); @@ -937,7 +981,7 @@ export function connectedInputNodesOnHandle( return connectedNodes; } -function updateProxyIdsOnTemplate( +export function updateProxyIdsOnTemplate( template: APITemplateType, idsMap: { [key: string]: string } ) { @@ -948,12 +992,16 @@ function updateProxyIdsOnTemplate( }); } -function updateEdgesIds(edges: Edge[], idsMap: { [key: string]: string }) { +export function updateEdgesIds( + edges: Edge[], + idsMap: { [key: string]: string } +) { edges.forEach((edge) => { let targetHandle: targetHandleType = edge.data.targetHandle; if (targetHandle.proxy && idsMap[targetHandle.proxy!.id]) { targetHandle.proxy!.id = idsMap[targetHandle.proxy!.id]; } + console.log("edge", edge); edge.data.targetHandle = targetHandle; edge.targetHandle = scapedJSONStringfy(targetHandle); }); @@ -1078,7 +1126,7 @@ export function getGroupStatus( flow: FlowType, ssData: { [key: string]: { valid: boolean; params: string } } ) { - let status = { valid: true, params: "Built sucessfully ✨" }; + let status = { valid: true, params: SUCCESS_BUILD }; const { nodes } = flow.data!; const ids = nodes.map((n: NodeType) => n.data.id); ids.forEach((id) => { @@ -1125,7 +1173,7 @@ export function downloadNode(NodeFLow: FlowType) { type: "application/json", }); element.href = URL.createObjectURL(file); - element.download = `${NodeFLow.name}.json`; + element.download = `${NodeFLow?.name ?? 
"node"}.json`; element.click(); } @@ -1147,6 +1195,20 @@ export function removeFileNameFromComponents(flow: FlowType) { }); } +export function removeGlobalVariableFromComponents(flow: FlowType) { + flow.data!.nodes.forEach((node: NodeType) => { + Object.keys(node.data.node!.template).forEach((field) => { + if (node.data?.node?.template[field]?.load_from_db) { + node.data.node!.template[field].value = ""; + node.data.node!.template[field].load_from_db = false; + } + }); + if (node.data.node?.flow) { + removeGlobalVariableFromComponents(node.data.node.flow); + } + }); +} + export function typesGenerator(data: APIObjectType) { return Object.keys(data) .reverse() @@ -1225,3 +1287,34 @@ export const createNewFlow = ( is_component: flow?.is_component ?? false, }; }; + +export function isInputNode(nodeData: NodeDataType): boolean { + return INPUT_TYPES.has(nodeData.type); +} + +export function isOutputNode(nodeData: NodeDataType): boolean { + return OUTPUT_TYPES.has(nodeData.type); +} + +export function isInputType(type: string): boolean { + return INPUT_TYPES.has(type); +} + +export function isOutputType(type: string): boolean { + return OUTPUT_TYPES.has(type); +} + +export function updateGroupRecursion(groupNode: NodeType, edges: Edge[]) { + if (groupNode.data.node?.flow) { + groupNode.data.node.flow.data!.nodes.forEach((node) => { + if (node.data.node?.flow) { + updateGroupRecursion(node, node.data.node.flow.data!.edges); + } + }); + let newFlow = groupNode.data.node!.flow; + const idsMap = updateIds(newFlow.data!); + updateProxyIdsOnTemplate(groupNode.data.node!.template, idsMap); + let flowEdges = edges; + updateEdgesIds(flowEdges, idsMap); + } +} diff --git a/src/frontend/src/utils/storeUtils.ts b/src/frontend/src/utils/storeUtils.ts index 8738bac75..e51cf8933 100644 --- a/src/frontend/src/utils/storeUtils.ts +++ b/src/frontend/src/utils/storeUtils.ts @@ -1,5 +1,7 @@ import { cloneDeep } from "lodash"; -import { FlowType } from "../types/flow"; +import { Node } from 
"reactflow"; +import { FlowType, NodeDataType } from "../types/flow"; +import { isInputNode, isOutputNode } from "./reactflowUtils"; export default function cloneFLowWithParent( flow: FlowType, @@ -21,3 +23,26 @@ export function getTagsIds( .map((tag) => tagListId.find((tagObj) => tagObj.name === tag))! .map((tag) => tag!.id); } + +export function getInputsAndOutputs(nodes: Node[]) { + let inputs: { type: string; id: string; displayName: string }[] = []; + let outputs: { type: string; id: string; displayName: string }[] = []; + nodes.forEach((node) => { + const nodeData: NodeDataType = node.data as NodeDataType; + if (isOutputNode(nodeData)) { + outputs.push({ + type: nodeData.type, + id: nodeData.id, + displayName: nodeData.node?.display_name ?? nodeData.id, + }); + } + if (isInputNode(nodeData)) { + inputs.push({ + type: nodeData.type, + id: nodeData.id, + displayName: nodeData.node?.display_name ?? nodeData.id, + }); + } + }); + return { inputs, outputs }; +} diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index b0432fe7e..a0c9c9756 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -1,25 +1,40 @@ import { + AlertCircle, + ArrowBigUp, + ArrowLeft, ArrowUpToLine, Bell, + Binary, BookMarked, BookmarkPlus, + Bot, Boxes, + Braces, + BrainCircuit, Check, CheckCircle2, ChevronDown, ChevronLeft, ChevronRight, + ChevronRightSquare, ChevronUp, ChevronsLeft, ChevronsRight, ChevronsUpDown, + ChevronsUpDownIcon, Circle, + CircleDot, Clipboard, + Code, Code2, Combine, + Command, Compass, Copy, Cpu, + Database, + Delete, + Dot, Download, DownloadCloud, Edit, @@ -28,18 +43,23 @@ import { Eye, EyeOff, File, + FileClock, FileDown, FileSearch, FileSearch2, + FileSliders, FileText, + FileType2, FileUp, - Fingerprint, + FlaskConical, FolderPlus, + FormInput, Forward, Gift, GitBranchPlus, GitFork, GithubIcon, + Globe, Group, Hammer, Heart, @@ -49,7 +69,6 @@ import { Key, Laptop2, Layers, - 
Lightbulb, Link, Loader2, Lock, @@ -59,34 +78,49 @@ import { Menu, MessageCircle, MessageSquare, + MessageSquareMore, MessagesSquare, Minimize2, Minus, MoonIcon, MoreHorizontal, Network, + Package2, Paperclip, Pencil, + PencilLine, + Pin, + Play, Plus, + PlusCircle, + PlusSquare, + PocketKnife, Redo, RefreshCcw, - Rocket, + Repeat, Save, SaveAll, Scissors, + ScreenShare, Search, Settings2, Share, Share2, Shield, + Sliders, + Snowflake, Sparkles, Square, + SquarePen, Store, SunIcon, TerminalIcon, TerminalSquare, + TextCursorInput, + TextSearch, ToyBrick, Trash2, + Type, Undo, Ungroup, Unplug, @@ -99,7 +133,6 @@ import { Variable, Wand2, Workflow, - Wrench, X, XCircle, Zap, @@ -108,7 +141,10 @@ import { FaApple, FaGithub } from "react-icons/fa"; import { AWSIcon } from "../icons/AWS"; import { AirbyteIcon } from "../icons/Airbyte"; import { AnthropicIcon } from "../icons/Anthropic"; +import { AstraDBIcon } from "../icons/AstraDB"; +import { AzureIcon } from "../icons/Azure"; import { BingIcon } from "../icons/Bing"; +import { BotMessageSquareIcon } from "../icons/BotMessageSquare"; import { ChromaIcon } from "../icons/ChromaIcon"; import { CohereIcon } from "../icons/Cohere"; import { ElasticsearchIcon } from "../icons/ElasticsearchStore"; @@ -116,6 +152,7 @@ import { EvernoteIcon } from "../icons/Evernote"; import { FBIcon } from "../icons/FacebookMessenger"; import { GitBookIcon } from "../icons/GitBook"; import { GoogleIcon } from "../icons/Google"; +import { GoogleGenerativeAIIcon } from "../icons/GoogleGenerativeAI"; import { GradientInfinity, GradientSave, @@ -127,9 +164,14 @@ import { MetaIcon } from "../icons/Meta"; import { MidjourneyIcon } from "../icons/Midjorney"; import { MongoDBIcon } from "../icons/MongoDB"; import { NotionIcon } from "../icons/Notion"; +import { OllamaIcon } from "../icons/Ollama"; import { OpenAiIcon } from "../icons/OpenAi"; import { PineconeIcon } from "../icons/Pinecone"; +import { PostgresIcon } from "../icons/Postgres"; +import 
{ PythonIcon } from "../icons/Python"; import { QDrantIcon } from "../icons/QDrant"; +import { QianFanChatIcon } from "../icons/QianFanChat"; +import { RedisIcon } from "../icons/Redis"; import { SearxIcon } from "../icons/Searx"; import { ShareIcon } from "../icons/Share"; import { Share2Icon } from "../icons/Share2"; @@ -178,8 +220,12 @@ export const gradients = [ ]; export const nodeColors: { [char: string]: string } = { + inputs: "#10B981", + outputs: "#AA2411", + data: "#4367BF", prompts: "#4367BF", - llms: "#6344BE", + models: "#6344BE", + model_specs: "#6344BE", chains: "#FE7500", Document: "#7AAE42", list: "#9AAE42", @@ -193,20 +239,30 @@ export const nodeColors: { [char: string]: string } = { embeddings: "#42BAA7", documentloaders: "#7AAE42", vectorstores: "#AA8742", + vectorsearch: "#AA8742", textsplitters: "#B47CB5", toolkits: "#DB2C2C", wrappers: "#E6277A", - utilities: "#31A3CC", + helpers: "#31A3CC", + experimental: "#E6277A", + langchain_utilities: "#31A3CC", output_parsers: "#E6A627", - str: "#049524", + str: "#31a3cc", + Text: "#31a3cc", retrievers: "#e6b25a", unknown: "#9CA3AF", custom_components: "#ab11ab", + Records: "#31a3cc", + Record: "#31a3cc", }; export const nodeNames: { [char: string]: string } = { + inputs: "Inputs", + outputs: "Outputs", + data: "Data", prompts: "Prompts", - llms: "LLMs", + models: "Models", + model_specs: "Model Specs", chains: "Chains", agents: "Agents", tools: "Tools", @@ -217,27 +273,58 @@ export const nodeNames: { [char: string]: string } = { embeddings: "Embeddings", documentloaders: "Loaders", vectorstores: "Vector Stores", + vectorsearch: "Vector Search", toolkits: "Toolkits", wrappers: "Wrappers", textsplitters: "Text Splitters", retrievers: "Retrievers", - utilities: "Utilities", + helpers: "Helpers", + experimental: "Experimental", + langchain_utilities: "Utilities", output_parsers: "Output Parsers", custom_components: "Custom", unknown: "Other", }; export const nodeIconsLucide: iconsType = { + X: X, + 
Notify: Bell, + ListFlows: Group, + ClearMessageHistory: FileClock, + Python: PythonIcon, + ChatOutput: BotMessageSquareIcon, + ChatInput: MessagesSquare, + inputs: Download, + outputs: Upload, + data: Database, + AzureChatOpenAi: AzureIcon, + Ollama: OllamaIcon, + ChatOllama: OllamaIcon, + AzureOpenAiEmbeddings: AzureIcon, + Azure: AzureIcon, + OllamaEmbeddings: OllamaIcon, + ChatOllamaModel: OllamaIcon, + Faiss: MetaIcon, + FaissSearch: MetaIcon, + AzureOpenAiModel: AzureIcon, + Redis: RedisIcon, + RedisSearch: RedisIcon, + PostgresChatMessageHistory: PostgresIcon, + BaiduQianfan: QianFanChatIcon, + Play, Vectara: VectaraIcon, ArrowUpToLine: ArrowUpToLine, Chroma: ChromaIcon, AirbyteJSONLoader: AirbyteIcon, AmazonBedrockEmbeddings: AWSIcon, + Amazon: AWSIcon, Anthropic: AnthropicIcon, ChatAnthropic: AnthropicIcon, + AstraDB: AstraDBIcon, BingSearchAPIWrapper: BingIcon, BingSearchRun: BingIcon, Cohere: CohereIcon, + ChevronsUpDownIcon, CohereEmbeddings: CohereIcon, EverNoteLoader: EvernoteIcon, FacebookChatLoader: FBIcon, @@ -245,13 +332,18 @@ export const nodeIconsLucide: iconsType = { GoogleSearchAPIWrapper: GoogleIcon, GoogleSearchResults: GoogleIcon, GoogleSearchRun: GoogleIcon, + Google: GoogleIcon, + GoogleGenerativeAI: GoogleGenerativeAIIcon, HNLoader: HackerNewsIcon, HuggingFaceHub: HuggingFaceIcon, + HuggingFace: HuggingFaceIcon, HuggingFaceEmbeddings: HuggingFaceIcon, IFixitLoader: IFixIcon, Meta: MetaIcon, Midjorney: MidjourneyIcon, MongoDBAtlasVectorSearch: MongoDBIcon, + MongoDB: MongoDBIcon, + MongoDBChatMessageHistory: MongoDBIcon, NotionDirectoryLoader: NotionIcon, ChatOpenAI: OpenAiIcon, AzureChatOpenAI: OpenAiIcon, @@ -264,30 +356,35 @@ export const nodeIconsLucide: iconsType = { Searx: SearxIcon, SlackDirectoryLoader: SvgSlackIcon, SupabaseVectorStore: SupabaseIcon, + Supabase: SupabaseIcon, VertexAI: VertexAIIcon, ChatVertexAI: VertexAIIcon, VertexAIEmbeddings: VertexAIIcon, Share3: ShareIcon, Share4: Share2Icon, - agents: Rocket, + agents: 
Bot, Workflow, User, WikipediaAPIWrapper: SvgWikipedia, chains: Link, memories: Cpu, - llms: Lightbulb, + models: BrainCircuit, + model_specs: FileSliders, prompts: TerminalSquare, - tools: Wrench, + tools: Hammer, advanced: Laptop2, chat: MessageCircle, - embeddings: Fingerprint, + embeddings: Binary, saved_components: GradientSave, documentloaders: Paperclip, vectorstores: Layers, - toolkits: Hammer, + vectorsearch: TextSearch, + toolkits: Package2, textsplitters: Scissors, wrappers: Gift, - utilities: Wand2, + helpers: Wand2, + experimental: FlaskConical, + langchain_utilities: PocketKnife, WolframAlphaAPIWrapper: SvgWolfram, output_parsers: Compass, retrievers: FileSearch, @@ -300,10 +397,10 @@ export const nodeIconsLucide: iconsType = { Trash2, Boxes, Network, - X, XCircle, Info, CheckCircle2, + SquarePen, Zap, MessagesSquare, ExternalLink, @@ -316,17 +413,24 @@ export const nodeIconsLucide: iconsType = { Bell, ChevronLeft, ChevronDown, + ArrowLeft, Shield, Plus, Redo, Settings2, + FileType2, Undo, FileSearch2, ChevronRight, Circle, + CircleDot, Clipboard, + PlusCircle, + PlusSquare, Code2, + Globe, Variable, + Snowflake, Store, Download, Eraser, @@ -363,13 +467,17 @@ export const nodeIconsLucide: iconsType = { Group, LogIn, ChevronUp, + PencilLine, Ungroup, BookMarked, Minus, Square, Minimize2, Maximize2, + FormInput, + ChevronRightSquare, SaveAll, + MessageSquareMore, Forward, Share2, Share, @@ -377,9 +485,25 @@ export const nodeIconsLucide: iconsType = { Loader2, BookmarkPlus, Heart, + Pin, Link, ToyBrick, RefreshCcw, Combine, TerminalIcon, + TerminalSquare, + TextCursorInput, + Repeat, + Sliders, + ScreenShare, + Code, + Type, + Braces, + FlaskConical, + AlertCircle, + Bot, + Delete, + Command, + ArrowBigUp, + Dot, }; diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index f1e534c1a..91d7a72f9 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -1,5 +1,6 @@ import clsx, { ClassValue } from 
"clsx"; import { twMerge } from "tailwind-merge"; +import { priorityFields } from "../constants/constants"; import { ADJECTIVES, DESCRIPTIONS, NOUNS } from "../flow_constants"; import { APIDataType, @@ -13,7 +14,7 @@ import { tweakType, } from "../types/components"; import { FlowType, NodeType } from "../types/flow"; -import { FlowState, FlowsState } from "../types/tabs"; +import { FlowState } from "../types/tabs"; import { buildTweaks } from "./reactflowUtils"; export function classNames(...classes: Array): string { @@ -58,11 +59,15 @@ export function normalCaseToSnakeCase(str: string): string { .join("_"); } -export function toTitleCase(str: string | undefined): string { +export function toTitleCase( + str: string | undefined, + isNodeField?: boolean +): string { if (!str) return ""; let result = str .split("_") .map((word, index) => { + if (isNodeField) return word; if (index === 0) { return checkUpperWords( word[0].toUpperCase() + word.slice(1).toLowerCase() @@ -75,6 +80,7 @@ export function toTitleCase(str: string | undefined): string { return result .split("-") .map((word, index) => { + if (isNodeField) return word; if (index === 0) { return checkUpperWords( word[0].toUpperCase() + word.slice(1).toLowerCase() @@ -119,15 +125,7 @@ export function groupByFamily( display_name?: string; }> = []; let checkedNodes = new Map(); - const excludeTypes = new Set([ - "str", - "bool", - "float", - "code", - "prompt", - "file", - "int", - ]); + const excludeTypes = new Set(["bool", "float", "code", "file", "int"]); const checkBaseClass = (template: TemplateVariableType) => { return ( @@ -146,7 +144,7 @@ export function groupByFamily( // se existir o flow for (const node of flow) { // para cada node do flow - if (node!.data!.node!.flow) break; // não faz nada se o node for um group + if (node!.data!.node!.flow || !node!.data!.node!.template) break; // não faz nada se o node for um group const nodeData = node.data; const foundNode = checkedNodes.get(nodeData.type); // 
verifica se o tipo do node já foi checado @@ -220,12 +218,8 @@ export function groupByFamily( })); } -export function buildInputs(flowState?: FlowState): string { - return flowState && - flowState.input_keys && - Object.keys(flowState.input_keys!).length > 0 - ? JSON.stringify(flowState.input_keys) - : '{"input": "message"}'; +export function buildInputs(): string { + return '{"input_value": "message"}'; } export function getRandomElement(array: T[]): T { @@ -295,11 +289,10 @@ export function buildTweakObject(tweak: tweakType) { /** * Function to get Chat Input Field - * @param {FlowType} flow - The current flow. * @param {FlowsState} tabsState - The current tabs state. * @returns {string} - The chat input field */ -export function getChatInputField(flow: FlowType, flowState?: FlowState) { +export function getChatInputField(flowState?: FlowState) { let chat_input_field = "text"; if (flowState && flowState.input_keys) { @@ -311,13 +304,14 @@ export function getChatInputField(flow: FlowType, flowState?: FlowState) { /** * Function to get the python code for the API * @param {string} flowId - The id of the flow + * @param {boolean} isAuth - If the API is authenticated + * @param {any[]} tweak - The tweaks * @returns {string} - The python code */ export function getPythonApiCode( flow: FlowType, isAuth: boolean, - tweak?: any[], - flowState?: FlowState + tweak?: any[] ): string { const flowId = flow.id; @@ -326,13 +320,10 @@ export function getPythonApiCode( // node.data.id // } const tweaks = buildTweaks(flow); - const inputs = buildInputs(flowState); return `import requests from typing import Optional -BASE_API_URL = "${window.location.protocol}//${ - window.location.host - }/api/v1/process" +BASE_API_URL = "${window.location.protocol}//${window.location.host}/api/v1/run" FLOW_ID = "${flowId}" # You can tweak the flow by adding a tweaks dictionary # e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} @@ -342,9 +333,12 @@ TWEAKS = ${ : JSON.stringify(tweaks, null, 2) } -def 
run_flow(inputs: dict, flow_id: str, tweaks: Optional[dict] = None${ - !isAuth ? `, api_key: Optional[str] = None` : "" - }) -> dict: +def run_flow(message: str, + flow_id: str, + output_type: str = "chat", + input_type: str = "chat", + tweaks: Optional[dict] = None, + api_key: Optional[str] = None) -> dict: """ Run a flow with a given message and optional tweaks. @@ -355,7 +349,11 @@ def run_flow(inputs: dict, flow_id: str, tweaks: Optional[dict] = None${ """ api_url = f"{BASE_API_URL}/{flow_id}" - payload = {"inputs": inputs} + payload = { + "input_value": message, + "output_type": output_type, + "input_type": input_type, + } headers = None if tweaks: payload["tweaks"] = tweaks @@ -365,9 +363,9 @@ def run_flow(inputs: dict, flow_id: str, tweaks: Optional[dict] = None${ return response.json() # Setup any tweaks you want to apply to the flow -inputs = ${inputs} +message = "message" ${!isAuth ? `api_key = ""` : ""} -print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS${ +print(run_flow(message=message, flow_id=FLOW_ID, tweaks=TWEAKS${ !isAuth ? `, api_key=api_key` : "" }))`; } @@ -375,55 +373,72 @@ print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS${ /** * Function to get the curl code for the API * @param {string} flowId - The id of the flow + * @param {boolean} isAuth - If the API is authenticated * @returns {string} - The curl code */ export function getCurlCode( flow: FlowType, isAuth: boolean, - tweak?: any[], - flowState?: FlowState + tweak?: any[] ): string { const flowId = flow.id; const tweaks = buildTweaks(flow); - const inputs = buildInputs(flowState); return `curl -X POST \\ ${window.location.protocol}//${ window.location.host - }/api/v1/process/${flowId} \\ + }/api/v1/run/${flowId}?stream=false \\ -H 'Content-Type: application/json'\\${ !isAuth ? `\n -H 'x-api-key: '\\` : "" } - -d '{"inputs": ${inputs}, "tweaks": ${ + -d '{"input_value": "message", + "output_type": "chat", + "input_type": "chat", + "tweaks": ${ tweak && tweak.length > 0 ? 
buildTweakObject(tweak) : JSON.stringify(tweaks, null, 2) - }}'`; + }}' + `; +} + +export function getOutputIds(flow) { + const nodes = flow.data!.nodes; + + const arrayOfOutputs = nodes.reduce((acc: string[], node) => { + if (node.data.type.toLowerCase().includes("output")) { + acc.push(node.id); + } + return acc; + }, []); + + const arrayOfOutputsJoin = arrayOfOutputs + .map((output) => `"${output}"`) + .join(", "); + + return arrayOfOutputsJoin; } /** * Function to get the python code for the API * @param {string} flow - The current flow + * @param {any[]} tweak - The tweaks * @returns {string} - The python code */ -export function getPythonCode( - flow: FlowType, - tweak?: any[], - flowState?: FlowState -): string { +export function getPythonCode(flow: FlowType, tweak?: any[]): string { const flowName = flow.name; const tweaks = buildTweaks(flow); - const inputs = buildInputs(flowState); - return `from langflow import load_flow_from_json + + return `from langflow.load import run_flow_from_json TWEAKS = ${ tweak && tweak.length > 0 ? 
buildTweakObject(tweak) : JSON.stringify(tweaks, null, 2) } -flow = load_flow_from_json("${flowName}.json", tweaks=TWEAKS) -# Now you can use it like any chain -inputs = ${inputs} -flow(inputs)`; + +result = run_flow_from_json(flow="${flowName}.json", + input_value="message", + tweaks=TWEAKS)`; } /** @@ -438,24 +453,15 @@ export function getWidgetCode( ): string { const flowId = flow.id; const flowName = flow.name; - const inputs = buildInputs(flowState); - let chat_input_field = getChatInputField(flow, flowState); + const inputs = buildInputs(); + let chat_input_field = getChatInputField(flowState); - return ` + return ` - Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": false, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": 
[], + "file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "User", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Used to get user input from the chat.", + "icon": "ChatInput", + "base_classes": ["Text", "object", "str", "Record"], + "display_name": "Chat Input", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatInput-7hPDT" + }, + "selected": false, + "width": 384, + "height": 569, + "positionAbsolute": { + "x": 687, + "y": 41.9375 + }, + "dragging": false + }, + { + "id": "ChatOutput-4sm9Z", + "type": "genericNode", + "position": { + "x": 4.939451516507972, + "y": 103.3695231904992 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + 
"placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.field_typing import Text\nfrom langflow.schema import Record\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Used to send a message to the chat.\"\n icon = \"ChatOutput\"\n\n def build(\n self,\n sender: Optional[str] = \"Machine\",\n sender_name: Optional[str] = \"AI\",\n input_value: Optional[str] = None,\n session_id: Optional[str] = None,\n return_record: Optional[bool] = False,\n ) -> Union[Text, Record]:\n return super().build(\n sender=sender,\n sender_name=sender_name,\n input_value=input_value,\n session_id=session_id,\n return_record=return_record,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "load_from_db": false, + "title_case": false + }, + "input_value": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "input_value", + "display_name": "Message", + "advanced": false, + "input_types": ["Text"], + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false + }, + "return_record": { + "type": "bool", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_record", + "display_name": "Return Record", + "advanced": false, + "dynamic": false, + "info": "Return the message as a record containing the sender, sender_name, and session_id.", + "load_from_db": false, + "title_case": false + }, + "sender": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "Machine", + "fileTypes": [], + 
"file_path": "", + "password": false, + "options": ["Machine", "User"], + "name": "sender", + "display_name": "Sender Type", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "sender_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "AI", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "sender_name", + "display_name": "Sender Name", + "advanced": false, + "dynamic": false, + "info": "", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "session_id": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "session_id", + "display_name": "Session ID", + "advanced": true, + "dynamic": false, + "info": "If provided, the message will be stored in the memory.", + "load_from_db": false, + "title_case": false, + "input_types": ["Text"] + }, + "_type": "CustomComponent" + }, + "description": "Used to send a message to the chat.", + "icon": "ChatOutput", + "base_classes": ["Text", "object", "str", "Record"], + "display_name": "Chat Output", + "documentation": "", + "custom_fields": { + "sender": null, + "sender_name": null, + "input_value": null, + "session_id": null, + "return_record": null + }, + "output_types": ["Text", "Record"], + "field_formatters": {}, + "frozen": false, + "field_order": [], + "beta": false + }, + "id": "ChatOutput-4sm9Z" + }, + "selected": true, + "width": 384, + "height": 569, + "positionAbsolute": { + "x": 4.939451516507972, + "y": 103.3695231904992 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "ChatOutput-4sm9Z", + "sourceHandle": "{œbaseClassesœ:[œTextœ,œobjectœ,œstrœ,œRecordœ],œdataTypeœ:œChatOutputœ,œidœ:œChatOutput-4sm9Zœ}", + "target": "ChatInput-7hPDT", + "targetHandle": 
"{œfieldNameœ:œinput_valueœ,œidœ:œChatInput-7hPDTœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}", + "data": { + "targetHandle": { + "fieldName": "input_value", + "id": "ChatInput-7hPDT", + "inputTypes": ["Text"], + "type": "str" + }, + "sourceHandle": { + "baseClasses": ["Text", "object", "str", "Record"], + "dataType": "ChatOutput", + "id": "ChatOutput-4sm9Z" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 stroke-connection", + "id": "reactflow__edge-ChatOutput-4sm9Z{œbaseClassesœ:[œTextœ,œobjectœ,œstrœ,œRecordœ],œdataTypeœ:œChatOutputœ,œidœ:œChatOutput-4sm9Zœ}-ChatInput-7hPDT{œfieldNameœ:œinput_valueœ,œidœ:œChatInput-7hPDTœ,œinputTypesœ:[œTextœ],œtypeœ:œstrœ}" + } + ], + "viewport": { + "x": 316.78239920440234, + "y": 182.84104875768622, + "zoom": 0.6070974421975234 + } + }, + "description": "flow to test ChatInput and output", + "name": "ChatITest", + "last_tested_version": "1.0.0a0", + "is_component": false +} diff --git a/src/frontend/tests/onlyFront/assets/collection.json b/src/frontend/tests/end-to-end/assets/collection.json similarity index 88% rename from src/frontend/tests/onlyFront/assets/collection.json rename to src/frontend/tests/end-to-end/assets/collection.json index 3872089d0..269afc8c9 100644 --- a/src/frontend/tests/onlyFront/assets/collection.json +++ b/src/frontend/tests/end-to-end/assets/collection.json @@ -10,7 +10,10 @@ "height": 631, "id": "ChatOpenAI-Hz56M", "type": "genericNode", - "position": { "x": 543.1816229116944, "y": 942.891611351432 }, + "position": { + "x": 543.1816229116944, + "y": 942.891611351432 + }, "data": { "type": "ChatOpenAI", "node": { @@ -238,7 +241,10 @@ "height": 387, "id": "AgentInitializer-QiQ4x", "type": "genericNode", - "position": { "x": 1036.6064439140812, "y": 645.1919693466587 }, + "position": { + "x": 1036.6064439140812, + "y": 645.1919693466587 + }, "data": { "type": "AgentInitializer", "node": { @@ -316,7 +322,10 @@ "height": 437, "id": "PythonFunctionTool-kX99N", "type": 
"genericNode", - "position": { "x": 553.050119331742, "y": 412.9533535948685 }, + "position": { + "x": 553.050119331742, + "y": 412.9533535948685 + }, "data": { "type": "PythonFunctionTool", "node": { @@ -380,7 +389,9 @@ "sourceHandle": "{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-Hz56Mœ}", "target": "AgentInitializer-QiQ4x", "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-QiQ4xœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-ChatOpenAI-Hz56M{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-Hz56Mœ}-AgentInitializer-QiQ4x{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-QiQ4xœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", @@ -409,7 +420,9 @@ "sourceHandle": "{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-kX99Nœ}", "target": "AgentInitializer-QiQ4x", "targetHandle": "{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-QiQ4xœ,œinputTypesœ:null,œtypeœ:œToolœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-PythonFunctionTool-kX99N{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-kX99Nœ}-AgentInitializer-QiQ4x{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-QiQ4xœ,œinputTypesœ:null,œtypeœ:œToolœ}", @@ -446,7 +459,10 @@ { "id": "CustomComponent-w4WCp", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -456,7 +472,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import 
LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -478,11 +494,18 @@ }, "id": "CustomComponent-w4WCp" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "e43cf2c8-cd64-4936-8170-b207a8b109c4", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -497,7 +520,10 @@ "height": 631, "id": "ChatOpenAI-7GFF0", "type": "genericNode", - "position": { "x": 543.1816229116944, "y": 942.891611351432 }, + "position": { + "x": 543.1816229116944, + "y": 942.891611351432 + }, "data": { "type": 
"ChatOpenAI", "node": { @@ -725,7 +751,10 @@ "height": 387, "id": "AgentInitializer-YJgqs", "type": "genericNode", - "position": { "x": 1036.6064439140812, "y": 645.1919693466587 }, + "position": { + "x": 1036.6064439140812, + "y": 645.1919693466587 + }, "data": { "type": "AgentInitializer", "node": { @@ -803,7 +832,10 @@ "height": 437, "id": "PythonFunctionTool-gqQDg", "type": "genericNode", - "position": { "x": 553.050119331742, "y": 412.9533535948685 }, + "position": { + "x": 553.050119331742, + "y": 412.9533535948685 + }, "data": { "type": "PythonFunctionTool", "node": { @@ -867,7 +899,9 @@ "sourceHandle": "{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-7GFF0œ}", "target": "AgentInitializer-YJgqs", "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-YJgqsœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-ChatOpenAI-7GFF0{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-7GFF0œ}-AgentInitializer-YJgqs{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-YJgqsœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", @@ -896,7 +930,9 @@ "sourceHandle": "{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-gqQDgœ}", "target": "AgentInitializer-YJgqs", "targetHandle": "{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-YJgqsœ,œinputTypesœ:null,œtypeœ:œToolœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-PythonFunctionTool-gqQDg{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-gqQDgœ}-AgentInitializer-YJgqs{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-YJgqsœ,œinputTypesœ:null,œtypeœ:œToolœ}", @@ -933,7 +969,10 @@ 
{ "id": "CustomComponent-iF9Zr", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -943,7 +982,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -965,11 +1004,18 @@ }, "id": "CustomComponent-iF9Zr" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 
0, + "y": 0, + "zoom": 1 + } }, "id": "88a0e6a1-8144-4bff-b28c-d312ae5c8a10", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -984,7 +1030,10 @@ "height": 631, "id": "ChatOpenAI-cljAK", "type": "genericNode", - "position": { "x": 543.1816229116944, "y": 942.891611351432 }, + "position": { + "x": 543.1816229116944, + "y": 942.891611351432 + }, "data": { "type": "ChatOpenAI", "node": { @@ -1212,7 +1261,10 @@ "height": 387, "id": "AgentInitializer-grV0u", "type": "genericNode", - "position": { "x": 1036.6064439140812, "y": 645.1919693466587 }, + "position": { + "x": 1036.6064439140812, + "y": 645.1919693466587 + }, "data": { "type": "AgentInitializer", "node": { @@ -1290,7 +1342,10 @@ "height": 437, "id": "PythonFunctionTool-SctM2", "type": "genericNode", - "position": { "x": 553.050119331742, "y": 412.9533535948685 }, + "position": { + "x": 553.050119331742, + "y": 412.9533535948685 + }, "data": { "type": "PythonFunctionTool", "node": { @@ -1354,7 +1409,9 @@ "sourceHandle": "{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-cljAKœ}", "target": "AgentInitializer-grV0u", "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-grV0uœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-ChatOpenAI-cljAK{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-cljAKœ}-AgentInitializer-grV0u{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-grV0uœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", @@ -1383,7 +1440,9 @@ "sourceHandle": "{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-SctM2œ}", "target": "AgentInitializer-grV0u", "targetHandle": "{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-grV0uœ,œinputTypesœ:null,œtypeœ:œToolœ}", - "style": { "stroke": 
"#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-PythonFunctionTool-SctM2{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-SctM2œ}-AgentInitializer-grV0u{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-grV0uœ,œinputTypesœ:null,œtypeœ:œToolœ}", @@ -1420,7 +1479,10 @@ { "id": "CustomComponent-z5kAP", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1430,7 +1492,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, 
prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1452,11 +1514,18 @@ }, "id": "CustomComponent-z5kAP" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "3128b556-35c7-4ced-a834-70fbeb1a410f", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1469,7 +1538,10 @@ { "id": "CustomComponent-ZieNZ", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1479,7 +1551,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, 
\"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1501,11 +1573,18 @@ }, "id": "CustomComponent-ZieNZ" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "d3f2c379-9df5-49a0-b33d-d855c2085949", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1518,7 +1597,10 @@ { "id": "CustomComponent-CcHG0", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1528,7 +1610,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = 
\"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1550,11 +1632,18 @@ }, "id": "CustomComponent-CcHG0" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "74afff1f-b540-43d5-bc66-d18f39d65d57", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1567,7 +1656,10 @@ { "id": "CustomComponent-Q8qSr", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1577,7 +1669,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom 
langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1599,11 +1691,18 @@ }, "id": "CustomComponent-Q8qSr" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "b0b1c893-6194-4748-ae10-b5f604b271c2", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1616,7 +1715,10 @@ { "id": "CustomComponent-Lgoca", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1626,7 +1728,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return 
Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1648,11 +1750,18 @@ }, "id": "CustomComponent-Lgoca" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "edf1051d-509b-46a2-84c6-b94bdcdc3a4f", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1665,7 +1774,10 @@ { "id": "CustomComponent-6OJGW", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1675,7 +1787,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: 
PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1697,11 +1809,18 @@ }, "id": "CustomComponent-6OJGW" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "65b2a4e0-4084-418a-aa86-ee914acb0ab5", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1714,7 +1833,10 @@ { "id": "CustomComponent-tcE83", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -1724,7 +1846,7 @@ "placeholder": "", "show": true, "multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you 
want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -1746,11 +1868,18 @@ }, "id": "CustomComponent-tcE83" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "534e412b-d37b-4133-8029-e5ed139fd55c", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -1765,7 +1894,10 @@ "height": 631, "id": "ChatOpenAI-mQEi3", "type": "genericNode", - "position": { "x": 543.1816229116944, "y": 942.891611351432 }, + "position": { + "x": 543.1816229116944, + "y": 942.891611351432 + }, "data": { "type": "ChatOpenAI", "node": { @@ -1993,7 +2125,10 @@ "height": 387, "id": "AgentInitializer-EE8R4", "type": "genericNode", - "position": { "x": 1036.6064439140812, "y": 645.1919693466587 }, + "position": { + "x": 1036.6064439140812, + "y": 645.1919693466587 + }, 
"data": { "type": "AgentInitializer", "node": { @@ -2071,7 +2206,10 @@ "height": 437, "id": "PythonFunctionTool-YKkDL", "type": "genericNode", - "position": { "x": 553.050119331742, "y": 412.9533535948685 }, + "position": { + "x": 553.050119331742, + "y": 412.9533535948685 + }, "data": { "type": "PythonFunctionTool", "node": { @@ -2135,7 +2273,9 @@ "sourceHandle": "{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-mQEi3œ}", "target": "AgentInitializer-EE8R4", "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-EE8R4œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-ChatOpenAI-mQEi3{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-mQEi3œ}-AgentInitializer-EE8R4{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-EE8R4œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", @@ -2164,7 +2304,9 @@ "sourceHandle": "{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-YKkDLœ}", "target": "AgentInitializer-EE8R4", "targetHandle": "{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-EE8R4œ,œinputTypesœ:null,œtypeœ:œToolœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-PythonFunctionTool-YKkDL{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-YKkDLœ}-AgentInitializer-EE8R4{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-EE8R4œ,œinputTypesœ:null,œtypeœ:œToolœ}", @@ -2201,7 +2343,10 @@ { "id": "CustomComponent-T38LS", "type": "genericNode", - "position": { "x": 536, "y": 337 }, + "position": { + "x": 536, + "y": 337 + }, "data": { "type": "CustomComponent", "node": { @@ -2211,7 +2356,7 @@ "placeholder": "", "show": true, 
"multiline": true, - "value": "from langflow import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", + "value": "from langflow.custom import CustomComponent\n\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\nfrom langchain.schema import Document\n\nimport requests\n\nclass YourComponent(CustomComponent):\n display_name: str = \"Custom Component\"\n description: str = \"Create any custom component you want!\"\n\n def build_config(self):\n return { \"url\": { \"multiline\": True, \"required\": True } }\n\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\n response = requests.get(url)\n chain = LLMChain(llm=llm, prompt=prompt)\n result = chain.run(response.text[:300])\n return Document(page_content=str(result))\n", "password": false, "name": "code", "advanced": false, @@ -2233,11 +2378,18 @@ }, "id": "CustomComponent-T38LS" }, - "positionAbsolute": { "x": 536, "y": 337 } + "positionAbsolute": { + "x": 536, + "y": 337 + } } ], "edges": [], - "viewport": { "x": 0, "y": 0, "zoom": 1 } + "viewport": { + "x": 0, + "y": 0, + "zoom": 1 + } }, "id": "ec36a3b7-7b5f-4663-983c-833bcec4d851", "user_id": "6f6ebc2c-9e7a-4c35-84fc-51760db10d9b" @@ -2252,7 +2404,10 @@ "height": 631, "id": "ChatOpenAI-E1XSb", "type": "genericNode", - "position": { "x": 
543.1816229116944, "y": 942.891611351432 }, + "position": { + "x": 543.1816229116944, + "y": 942.891611351432 + }, "data": { "type": "ChatOpenAI", "node": { @@ -2480,7 +2635,10 @@ "height": 387, "id": "AgentInitializer-goPm2", "type": "genericNode", - "position": { "x": 1036.6064439140812, "y": 645.1919693466587 }, + "position": { + "x": 1036.6064439140812, + "y": 645.1919693466587 + }, "data": { "type": "AgentInitializer", "node": { @@ -2558,7 +2716,10 @@ "height": 437, "id": "PythonFunctionTool-SQikY", "type": "genericNode", - "position": { "x": 553.050119331742, "y": 412.9533535948685 }, + "position": { + "x": 553.050119331742, + "y": 412.9533535948685 + }, "data": { "type": "PythonFunctionTool", "node": { @@ -2622,7 +2783,9 @@ "sourceHandle": "{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-E1XSbœ}", "target": "AgentInitializer-goPm2", "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-goPm2œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-ChatOpenAI-E1XSb{œbaseClassesœ:[œSerializableœ,œBaseChatModelœ,œChatOpenAIœ,œBaseLanguageModelœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-E1XSbœ}-AgentInitializer-goPm2{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-goPm2œ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", @@ -2651,7 +2814,9 @@ "sourceHandle": "{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-SQikYœ}", "target": "AgentInitializer-goPm2", "targetHandle": "{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-goPm2œ,œinputTypesœ:null,œtypeœ:œToolœ}", - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": 
"reactflow__edge-PythonFunctionTool-SQikY{œbaseClassesœ:[œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-SQikYœ}-AgentInitializer-goPm2{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-goPm2œ,œinputTypesœ:null,œtypeœ:œToolœ}", diff --git a/src/frontend/tests/onlyFront/assets/flow.json b/src/frontend/tests/end-to-end/assets/flow.json similarity index 100% rename from src/frontend/tests/onlyFront/assets/flow.json rename to src/frontend/tests/end-to-end/assets/flow.json diff --git a/src/frontend/tests/end-to-end/assets/flow_group_test.json b/src/frontend/tests/end-to-end/assets/flow_group_test.json new file mode 100644 index 000000000..c88576305 --- /dev/null +++ b/src/frontend/tests/end-to-end/assets/flow_group_test.json @@ -0,0 +1,532 @@ +{ + "id": "8404c1fc-1bce-43b4-a8bc-3febea587fc8", + "data": { + "nodes": [ + { + "id": "PythonFunctionTool-RfJui", + "type": "genericNode", + "position": { + "x": 117.54690105175428, + "y": -84.2465475108354 + }, + "data": { + "type": "PythonFunctionTool", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "\ndef python_function(text: str) -> str:\n \"\"\"This is a default python function that returns the input text\"\"\"\n return text\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "description": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "Returns the Text you send. 
This is a testing tool.", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "description", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "name": { + "type": "str", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": "PythonFunction", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "return_direct": { + "type": "bool", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "return_direct", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "_type": "PythonFunctionTool" + }, + "description": "Python function to be executed.", + "base_classes": ["BaseTool", "Tool"], + "display_name": "PythonFunctionTool", + "documentation": "", + "custom_fields": {}, + "output_types": [], + "field_formatters": {}, + "pinned": false, + "beta": false + }, + "id": "PythonFunctionTool-RfJui" + }, + "selected": true, + "width": 384, + "height": 466, + "positionAbsolute": { + "x": 117.54690105175428, + "y": -84.2465475108354 + }, + "dragging": false + }, + { + "id": "AgentInitializer-tPdJw", + "type": "genericNode", + "position": { + "x": 677.68677055088, + "y": 127.19859565276168 + }, + "data": { + "type": "AgentInitializer", + "node": { + "template": { + "llm": { + "type": "BaseLanguageModel", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "llm", + "display_name": "Language Model", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "memory": { + "type": "BaseChatMemory", + "required": false, + "placeholder": 
"", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "memory", + "display_name": "Memory", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "tools": { + "type": "Tool", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "tools", + "display_name": "Tools", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "agent": { + "type": "str", + "required": true, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "zero-shot-react-description", + "fileTypes": [], + "file_path": "", + "password": false, + "options": [ + "zero-shot-react-description", + "react-docstore", + "self-ask-with-search", + "conversational-react-description", + "chat-zero-shot-react-description", + "chat-conversational-react-description", + "structured-chat-zero-shot-react-description", + "openai-functions", + "openai-multi-functions", + "JsonAgent", + "CSVAgent", + "VectorStoreAgent", + "VectorStoreRouterAgent", + "SQLAgent" + ], + "name": "agent", + "display_name": "Agent Type", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Callable, List, Optional, Union\n\nfrom langchain.agents import AgentExecutor, AgentType, initialize_agent, types\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool\n\n\nclass AgentInitializerComponent(CustomComponent):\n display_name: str = \"Agent Initializer\"\n description: str = \"Initialize a Langchain Agent.\"\n documentation: str = \"https://python.langchain.com/docs/modules/agents/agent_types/\"\n\n def 
build_config(self):\n agents = list(types.AGENT_TO_CLASS.keys())\n # field_type and required are optional\n return {\n \"agent\": {\"options\": agents, \"value\": agents[0], \"display_name\": \"Agent Type\"},\n \"max_iterations\": {\"display_name\": \"Max Iterations\", \"value\": 10},\n \"memory\": {\"display_name\": \"Memory\"},\n \"tools\": {\"display_name\": \"Tools\"},\n \"llm\": {\"display_name\": \"Language Model\"},\n \"code\": {\"advanced\": True},\n }\n\n def build(\n self,\n agent: str,\n llm: BaseLanguageModel,\n tools: List[Tool],\n max_iterations: int,\n memory: Optional[BaseChatMemory] = None,\n ) -> Union[AgentExecutor, Callable]:\n agent = AgentType(agent)\n if memory:\n return initialize_agent(\n tools=tools,\n llm=llm,\n agent=agent,\n memory=memory,\n return_intermediate_steps=True,\n handle_parsing_errors=True,\n max_iterations=max_iterations,\n )\n return initialize_agent(\n tools=tools,\n llm=llm,\n agent=agent,\n return_intermediate_steps=True,\n handle_parsing_errors=True,\n max_iterations=max_iterations,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": true, + "dynamic": true, + "info": "", + "title_case": false + }, + "max_iterations": { + "type": "int", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 10, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_iterations", + "display_name": "Max Iterations", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "Initialize a Langchain Agent.", + "base_classes": [ + "Runnable", + "Chain", + "Serializable", + "object", + "AgentExecutor", + "Generic", + "RunnableSerializable", + "Callable" + ], + "display_name": "Agent Initializer", + "documentation": "https://python.langchain.com/docs/modules/agents/agent_types/", + "custom_fields": { + "agent": null, + "llm": null, + "tools": null, + 
"max_iterations": null, + "memory": null + }, + "output_types": ["AgentExecutor", "Callable"], + "field_formatters": {}, + "pinned": false, + "beta": true + }, + "id": "AgentInitializer-tPdJw" + }, + "selected": false, + "width": 384, + "height": 522 + }, + { + "id": "ChatOpenAISpecs-stxRM", + "type": "genericNode", + "position": { + "x": 18.226716205350385, + "y": 432.6122491402193 + }, + "data": { + "type": "ChatOpenAISpecs", + "node": { + "template": { + "code": { + "type": "code", + "required": true, + "placeholder": "", + "list": false, + "show": true, + "multiline": true, + "value": "from typing import Optional, Union\n\nfrom langchain.llms import BaseLLM\nfrom langchain_community.chat_models.openai import ChatOpenAI\nfrom langflow.custom import CustomComponent\nfrom langflow.field_typing import BaseLanguageModel, NestedDict\n\n\nclass ChatOpenAIComponent(CustomComponent):\n display_name = \"ChatOpenAI\"\n description = \"`OpenAI` Chat large language models API.\"\n icon = \"OpenAI\"\n\n def build_config(self):\n return {\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"advanced\": False,\n \"required\": False,\n },\n \"model_kwargs\": {\n \"display_name\": \"Model Kwargs\",\n \"advanced\": True,\n \"required\": False,\n },\n \"model_name\": {\n \"display_name\": \"Model Name\",\n \"advanced\": False,\n \"required\": False,\n \"options\": [\n \"gpt-4-turbo-preview\",\n \"gpt-4-0125-preview\",\n \"gpt-4-1106-preview\",\n \"gpt-4-vision-preview\",\n \"gpt-3.5-turbo-0125\",\n \"gpt-3.5-turbo-1106\",\n ],\n },\n \"openai_api_base\": {\n \"display_name\": \"OpenAI API Base\",\n \"advanced\": False,\n \"required\": False,\n \"info\": (\n \"The base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\\n\\n\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\"\n ),\n },\n \"openai_api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"advanced\": False,\n \"required\": False,\n \"password\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"advanced\": False,\n \"required\": False,\n \"value\": 0.7,\n },\n }\n\n def build(\n self,\n max_tokens: Optional[int] = 256,\n model_kwargs: NestedDict = {},\n model_name: str = \"gpt-4-1106-preview\",\n openai_api_base: Optional[str] = None,\n openai_api_key: Optional[str] = None,\n temperature: float = 0.7,\n ) -> Union[BaseLanguageModel, BaseLLM]:\n if not openai_api_base:\n openai_api_base = \"https://api.openai.com/v1\"\n return ChatOpenAI(\n max_tokens=max_tokens,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=openai_api_key,\n temperature=temperature,\n )\n", + "fileTypes": [], + "file_path": "", + "password": false, + "name": "code", + "advanced": false, + "dynamic": true, + "info": "", + "title_case": false + }, + "max_tokens": { + "type": "int", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 256, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "max_tokens", + "display_name": "Max Tokens", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false + }, + "model_kwargs": { + "type": "NestedDict", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": {}, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "model_kwargs", + "display_name": "Model Kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "title_case": false + }, + "model_name": { + "type": "str", + "required": false, + "placeholder": "", + "list": true, + "show": true, + "multiline": false, + "value": "gpt-4-1106-preview", + "fileTypes": [], + 
"file_path": "", + "password": false, + "options": [ + "gpt-4-turbo-preview", + "gpt-4-0125-preview", + "gpt-4-1106-preview", + "gpt-4-vision-preview", + "gpt-3.5-turbo-0125", + "gpt-3.5-turbo-1106" + ], + "name": "model_name", + "display_name": "Model Name", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_base": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.", + "title_case": false, + "input_types": ["Text"] + }, + "openai_api_key": { + "type": "str", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "fileTypes": [], + "file_path": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "title_case": false, + "input_types": ["Text"] + }, + "temperature": { + "type": "float", + "required": false, + "placeholder": "", + "list": false, + "show": true, + "multiline": false, + "value": 0.7, + "fileTypes": [], + "file_path": "", + "password": false, + "name": "temperature", + "display_name": "Temperature", + "advanced": false, + "dynamic": false, + "info": "", + "rangeSpec": { + "min": -1, + "max": 1, + "step": 0.1 + }, + "title_case": false + }, + "_type": "CustomComponent" + }, + "description": "`OpenAI` Chat large language models API.", + "icon": "OpenAI", + "base_classes": [ + "Runnable", + "BaseLLM", + "Serializable", + "BaseLanguageModel", + "object", + "Generic", + "RunnableSerializable" + ], + "display_name": "ChatOpenAI", + "documentation": "", + "custom_fields": { + 
"max_tokens": null, + "model_kwargs": null, + "model_name": null, + "openai_api_base": null, + "openai_api_key": null, + "temperature": null + }, + "output_types": ["BaseLanguageModel", "BaseLLM"], + "field_formatters": {}, + "pinned": false, + "beta": true + }, + "id": "ChatOpenAISpecs-stxRM" + }, + "selected": false, + "width": 384, + "height": 666, + "positionAbsolute": { + "x": 18.226716205350385, + "y": 432.6122491402193 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "ChatOpenAISpecs-stxRM", + "sourceHandle": "{œbaseClassesœ:[œRunnableœ,œBaseLLMœ,œSerializableœ,œBaseLanguageModelœ,œobjectœ,œGenericœ,œRunnableSerializableœ],œdataTypeœ:œChatOpenAISpecsœ,œidœ:œChatOpenAISpecs-stxRMœ}", + "target": "AgentInitializer-tPdJw", + "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", + "data": { + "targetHandle": { + "fieldName": "llm", + "id": "AgentInitializer-tPdJw", + "inputTypes": null, + "type": "BaseLanguageModel" + }, + "sourceHandle": { + "baseClasses": [ + "Runnable", + "BaseLLM", + "Serializable", + "BaseLanguageModel", + "object", + "Generic", + "RunnableSerializable" + ], + "dataType": "ChatOpenAISpecs", + "id": "ChatOpenAISpecs-stxRM" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-ChatOpenAISpecs-stxRM{œbaseClassesœ:[œRunnableœ,œBaseLLMœ,œSerializableœ,œBaseLanguageModelœ,œobjectœ,œGenericœ,œRunnableSerializableœ],œdataTypeœ:œChatOpenAISpecsœ,œidœ:œChatOpenAISpecs-stxRMœ}-AgentInitializer-tPdJw{œfieldNameœ:œllmœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}" + }, + { + "source": "PythonFunctionTool-RfJui", + "sourceHandle": "{œbaseClassesœ:[œBaseToolœ,œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-RfJuiœ}", + "target": "AgentInitializer-tPdJw", + "targetHandle": "{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œToolœ}", + "data": 
{ + "targetHandle": { + "fieldName": "tools", + "id": "AgentInitializer-tPdJw", + "inputTypes": null, + "type": "Tool" + }, + "sourceHandle": { + "baseClasses": ["BaseTool", "Tool"], + "dataType": "PythonFunctionTool", + "id": "PythonFunctionTool-RfJui" + } + }, + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "id": "reactflow__edge-PythonFunctionTool-RfJui{œbaseClassesœ:[œBaseToolœ,œToolœ],œdataTypeœ:œPythonFunctionToolœ,œidœ:œPythonFunctionTool-RfJuiœ}-AgentInitializer-tPdJw{œfieldNameœ:œtoolsœ,œidœ:œAgentInitializer-tPdJwœ,œinputTypesœ:null,œtypeœ:œToolœ}" + } + ], + "viewport": { + "x": 37.63043052737157, + "y": 71.47518177614131, + "zoom": 0.5140569133280332 + } + }, + "description": "Uncover Business Opportunities with NLP.", + "name": "Untitled document (20)", + "last_tested_version": "0.7.0a0", + "is_component": false +} diff --git a/src/frontend/tests/onlyFront/assets/flowtest.json b/src/frontend/tests/end-to-end/assets/flowtest.json similarity index 54% rename from src/frontend/tests/onlyFront/assets/flowtest.json rename to src/frontend/tests/end-to-end/assets/flowtest.json index 8b83ee270..c4de3d778 100644 --- a/src/frontend/tests/onlyFront/assets/flowtest.json +++ b/src/frontend/tests/end-to-end/assets/flowtest.json @@ -8,7 +8,10 @@ "height": 461, "id": "CustomComponent-MtJjl", "type": "genericNode", - "position": { "x": 534.3712097224906, "y": -135.01908566635723 }, + "position": { + "x": 534.3712097224906, + "y": -135.01908566635723 + }, "data": { "type": "CustomComponent", "node": { @@ -20,7 +23,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langflow import CustomComponent\nfrom langflow.field_typing import Data\nfrom pathlib import Path\nfrom platformdirs import user_cache_dir\nimport os\n\nclass Component(CustomComponent):\n documentation: str = \"http://docs.langflow.org/components/custom\"\n\n def build_config(self):\n return {\"text_input\":{\"display_name\":\"Text Input\", 
\"input_types\":[\"str\"]},\"save_path\":{\"display_name\":\"Save Path\",\n \"info\":\"Put the full path with the file name and extension\",\"value\":Path(user_cache_dir(\"langflow\"))/\"text.t1.txt\"}}\n\n def build(self, text_input:str,save_path:str) -> str:\n try:\n # Create the directory if it doesn't exist\n os.makedirs(os.path.dirname(save_path), exist_ok=True)\n\n # Open the file in write mode and save the text\n with open(save_path, 'w') as file:\n file.write(text_input)\n except Exception as e:\n raise e\n self.status = text_input\n return text_input", + "value": "from langflow.custom import CustomComponent\nfrom langflow.field_typing import Data\nfrom pathlib import Path\nfrom platformdirs import user_cache_dir\nimport os\n\nclass Component(CustomComponent):\n documentation: str = \"http://docs.langflow.org/components/custom\"\n\n def build_config(self):\n return {\"text_input\":{\"display_name\":\"Text Input\", \"input_types\":[\"str\"]},\"save_path\":{\"display_name\":\"Save Path\",\n \"info\":\"Put the full path with the file name and extension\",\"value\":Path(user_cache_dir(\"langflow\"))/\"text.t1.txt\"}}\n\n def build(self, text_input:str,save_path:str) -> str:\n try:\n # Create the directory if it doesn't exist\n os.makedirs(os.path.dirname(save_path), exist_ok=True)\n\n # Open the file in write mode and save the text\n with open(save_path, 'w') as file:\n file.write(text_input)\n except Exception as e:\n raise e\n self.status = text_input\n return text_input", "fileTypes": [], "file_path": "", "password": false, @@ -69,7 +72,10 @@ "base_classes": ["str"], "display_name": "text checkpoint", "documentation": "http://docs.langflow.org/components/custom", - "custom_fields": { "save_path": null, "text_input": null }, + "custom_fields": { + "save_path": null, + "text_input": null + }, "output_types": ["str"], "field_formatters": {}, "beta": true @@ -78,14 +84,20 @@ }, "selected": false, "dragging": false, - "positionAbsolute": { "x": 534.3712097224906, 
"y": -135.01908566635723 } + "positionAbsolute": { + "x": 534.3712097224906, + "y": -135.01908566635723 + } }, { "width": 384, "height": 453, "id": "CustomComponent-7NQoq", "type": "genericNode", - "position": { "x": 27.487979888011637, "y": -414.43998171034826 }, + "position": { + "x": 27.487979888011637, + "y": -414.43998171034826 + }, "data": { "type": "CustomComponent", "node": { @@ -131,7 +143,7 @@ "list": false, "show": true, "multiline": true, - "value": "from langflow import CustomComponent\nfrom typing import Optional, List, Dict, Union\nfrom langflow.field_typing import (\n AgentExecutor,\n BaseChatMemory,\n BaseLanguageModel,\n BaseLLM,\n BaseLoader,\n BaseMemory,\n BaseOutputParser,\n BasePromptTemplate,\n BaseRetriever,\n Callable,\n Chain,\n ChatPromptTemplate,\n Data,\n Document,\n Embeddings,\n NestedDict,\n Object,\n PromptTemplate,\n TextSplitter,\n Tool,\n VectorStore,\n)\n\nfrom openai import OpenAI\nimport os\nimport ffmpeg\n\nclass Component(CustomComponent):\n display_name: str = \"Whisper Transcriber\"\n description: str = \"Converts audio to text using OpenAI's Whisper.\"\n\n def build_config(self):\n return {\"audio\": {\"field_type\": \"file\", \"suffixes\": [\".mp3\", \".mp4\", \".m4a\"]}, \"OpenAIKey\": {\"field_type\": \"str\", \"password\": True}}\n\n def calculate_segment_duration(self, audio_path, target_chunk_size_mb=24):\n # Calculate the target chunk size in bytes\n target_chunk_size_bytes = target_chunk_size_mb * 1024 * 1024\n\n # Use ffprobe to get the audio file information\n ffprobe_output = ffmpeg.probe(audio_path)\n print(ffprobe_output)\n # Convert duration to float\n duration = float(ffprobe_output[\"format\"][\"duration\"])\n\n # Calculate the approximate bitrate\n bitrate = os.path.getsize(audio_path) / duration\n\n # Calculate the segment duration to achieve the target chunk size\n segment_duration = target_chunk_size_bytes / bitrate\n\n return segment_duration\n\n def split_audio_into_chunks(self, audio_path, 
target_chunk_size_mb=24):\n # Calculate the segment duration\n segment_duration = self.calculate_segment_duration(audio_path, target_chunk_size_mb)\n\n # Create a directory to store the chunks\n output_directory = f\"{os.path.splitext(audio_path)[0]}_chunks\"\n os.makedirs(output_directory, exist_ok=True)\n\n # Use ffmpeg-python to split the audio file into chunks\n (\n ffmpeg.input(audio_path)\n .output(f\"{output_directory}/%03d{os.path.splitext(audio_path)[1]}\", codec=\"copy\", f=\"segment\", segment_time=segment_duration)\n .run()\n )\n\n # Get the list of generated chunk files\n chunks = [os.path.join(output_directory, file) for file in os.listdir(output_directory)]\n\n return chunks\n\n def build(self, audio: str, OpenAIKey: str) -> str:\n # Split audio into chunks\n audio_chunks = self.split_audio_into_chunks(audio)\n\n client = OpenAI(api_key=OpenAIKey)\n transcripts = []\n\n try:\n for chunk in audio_chunks:\n with open(chunk, \"rb\") as chunk_file:\n transcript = client.audio.transcriptions.create(\n model=\"whisper-1\",\n file=chunk_file,\n response_format=\"text\"\n )\n transcripts.append(transcript)\n finally:\n # Clean up temporary chunk files\n for chunk in audio_chunks:\n os.remove(chunk)\n\n # Concatenate transcripts into the final response\n final_response = \"\\n\".join(transcripts)\n self.status = final_response\n return final_response\n", + "value": "from langflow.custom import CustomComponent\nfrom typing import Optional, List, Dict, Union\nfrom langflow.field_typing import (\n AgentExecutor,\n BaseChatMemory,\n BaseLanguageModel,\n BaseLLM,\n BaseLoader,\n BaseMemory,\n BaseOutputParser,\n BasePromptTemplate,\n BaseRetriever,\n Callable,\n Chain,\n ChatPromptTemplate,\n Data,\n Document,\n Embeddings,\n NestedDict,\n Object,\n PromptTemplate,\n TextSplitter,\n Tool,\n VectorStore,\n)\n\nfrom openai import OpenAI\nimport os\nimport ffmpeg\n\nclass Component(CustomComponent):\n display_name: str = \"Whisper Transcriber\"\n description: str = 
\"Converts audio to text using OpenAI's Whisper.\"\n\n def build_config(self):\n return {\"audio\": {\"field_type\": \"file\", \"suffixes\": [\".mp3\", \".mp4\", \".m4a\"]}, \"OpenAIKey\": {\"field_type\": \"str\", \"password\": True}}\n\n def calculate_segment_duration(self, audio_path, target_chunk_size_mb=24):\n # Calculate the target chunk size in bytes\n target_chunk_size_bytes = target_chunk_size_mb * 1024 * 1024\n\n # Use ffprobe to get the audio file information\n ffprobe_output = ffmpeg.probe(audio_path)\n print(ffprobe_output)\n # Convert duration to float\n duration = float(ffprobe_output[\"format\"][\"duration\"])\n\n # Calculate the approximate bitrate\n bitrate = os.path.getsize(audio_path) / duration\n\n # Calculate the segment duration to achieve the target chunk size\n segment_duration = target_chunk_size_bytes / bitrate\n\n return segment_duration\n\n def split_audio_into_chunks(self, audio_path, target_chunk_size_mb=24):\n # Calculate the segment duration\n segment_duration = self.calculate_segment_duration(audio_path, target_chunk_size_mb)\n\n # Create a directory to store the chunks\n output_directory = f\"{os.path.splitext(audio_path)[0]}_chunks\"\n os.makedirs(output_directory, exist_ok=True)\n\n # Use ffmpeg-python to split the audio file into chunks\n (\n ffmpeg.input(audio_path)\n .output(f\"{output_directory}/%03d{os.path.splitext(audio_path)[1]}\", codec=\"copy\", f=\"segment\", segment_time=segment_duration)\n .run()\n )\n\n # Get the list of generated chunk files\n chunks = [os.path.join(output_directory, file) for file in os.listdir(output_directory)]\n\n return chunks\n\n def build(self, audio: str, OpenAIKey: str) -> str:\n # Split audio into chunks\n audio_chunks = self.split_audio_into_chunks(audio)\n\n client = OpenAI(api_key=OpenAIKey)\n transcripts = []\n\n try:\n for chunk in audio_chunks:\n with open(chunk, \"rb\") as chunk_file:\n transcript = client.audio.transcriptions.create(\n model=\"whisper-1\",\n file=chunk_file,\n 
response_format=\"text\"\n )\n transcripts.append(transcript)\n finally:\n # Clean up temporary chunk files\n for chunk in audio_chunks:\n os.remove(chunk)\n\n # Concatenate transcripts into the final response\n final_response = \"\\n\".join(transcripts)\n self.status = final_response\n return final_response\n", "fileTypes": [], "file_path": "", "password": false, @@ -146,7 +158,10 @@ "base_classes": ["str"], "display_name": "Whisper Transcriber", "documentation": "", - "custom_fields": { "OpenAIKey": null, "audio": null }, + "custom_fields": { + "OpenAIKey": null, + "audio": null + }, "output_types": ["str"], "field_formatters": {}, "beta": true @@ -180,13 +195,19 @@ "id": "CustomComponent-7NQoq" } }, - "style": { "stroke": "#555" }, + "style": { + "stroke": "#555" + }, "className": "stroke-gray-900 stroke-connection", "animated": false, "id": "reactflow__edge-CustomComponent-7NQoq{œbaseClassesœ:[œstrœ],œdataTypeœ:œCustomComponentœ,œidœ:œCustomComponent-7NQoqœ}-CustomComponent-MtJjl{œfieldNameœ:œtext_inputœ,œidœ:œCustomComponent-MtJjlœ,œinputTypesœ:[œstrœ],œtypeœ:œstrœ}" } ], - "viewport": { "x": 119.37759169012509, "y": 351.3082742479685, "zoom": 1 } + "viewport": { + "x": 119.37759169012509, + "y": 351.3082742479685, + "zoom": 1 + } }, "is_component": false, "updated_at": "2023-12-13T23:51:56.874099", diff --git a/src/frontend/tests/onlyFront/auto_login.spec.ts b/src/frontend/tests/end-to-end/auto_login.spec.ts similarity index 52% rename from src/frontend/tests/onlyFront/auto_login.spec.ts rename to src/frontend/tests/end-to-end/auto_login.spec.ts index cdceca5e5..582e52bb8 100644 --- a/src/frontend/tests/onlyFront/auto_login.spec.ts +++ b/src/frontend/tests/end-to-end/auto_login.spec.ts @@ -1,27 +1,29 @@ import { test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(16000); + // test.setTimeout(140000); +}); test.describe("Auto_login tests", () => { test("auto_login sign in", async ({ page }) => { - await 
page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await page.goto("http:localhost:3000/"); + await page.goto("/"); await page.locator('//*[@id="new-project-btn"]').click(); }); test("auto_login block_admin", async ({ page }) => { - await page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await page.goto("http:localhost:3000/"); + await page.goto("/"); await page.locator('//*[@id="new-project-btn"]').click(); - await page.goto("http:localhost:3000/login"); + await page.waitForTimeout(5000); + + await page.goto("/login"); await page.locator('//*[@id="new-project-btn"]').click(); - await page.goto("http:localhost:3000/admin"); + await page.waitForTimeout(5000); + + await page.goto("/admin"); await page.locator('//*[@id="new-project-btn"]').click(); - await page.goto("http:localhost:3000/admin/login"); + await page.waitForTimeout(5000); + + await page.goto("/admin/login"); await page.locator('//*[@id="new-project-btn"]').click(); + await page.waitForTimeout(5000); }); }); diff --git a/src/frontend/tests/end-to-end/chat_io.spec.ts b/src/frontend/tests/end-to-end/chat_io.spec.ts new file mode 100644 index 000000000..b37ebfede --- /dev/null +++ b/src/frontend/tests/end-to-end/chat_io.spec.ts @@ -0,0 +1,46 @@ +import { test } from "@playwright/test"; +import { readFileSync } from "fs"; +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(20000); + // test.setTimeout(120000); +}); +test("chat_io_teste", async ({ page }) => { + await page.goto("/"); + await page.locator("span").filter({ hasText: "My Collection" }).isVisible(); + // Read your file into a buffer. 
+ const jsonContent = readFileSync( + "tests/end-to-end/assets/ChatTest.json", + "utf-8" + ); + + // Create the DataTransfer and File + const dataTransfer = await page.evaluateHandle((data) => { + const dt = new DataTransfer(); + // Convert the buffer to a hex array + const file = new File([data], "ChatTest.json", { + type: "application/json", + }); + dt.items.add(file); + return dt; + }, jsonContent); + + await page.locator('//*[@id="new-project-btn"]').click(); + await page.waitForTimeout(2000); + + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + + // Now dispatch + await page.dispatchEvent( + '//*[@id="react-flow-id"]/div[1]/div[1]/div', + "drop", + { + dataTransfer, + } + ); + await page.getByLabel("fit view").click(); + await page.getByText("Run", { exact: true }).click(); + await page.getByPlaceholder("Send a message...").click(); + await page.getByPlaceholder("Send a message...").fill("teste"); + await page.getByRole("button").nth(1).click(); +}); diff --git a/src/frontend/tests/end-to-end/codeAreaModalComponent.spec.ts b/src/frontend/tests/end-to-end/codeAreaModalComponent.spec.ts index 8a604bd3d..322070faa 100644 --- a/src/frontend/tests/end-to-end/codeAreaModalComponent.spec.ts +++ b/src/frontend/tests/end-to-end/codeAreaModalComponent.spec.ts @@ -1,5 +1,8 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + await page.waitForTimeout(2000); + test.setTimeout(120000); +}); test("CodeAreaModalComponent", async ({ page }) => { await page.goto("http://localhost:3000/"); await page.waitForTimeout(2000); @@ -7,6 +10,9 @@ test("CodeAreaModalComponent", async ({ page }) => { await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("pythonfunctiontool"); @@ -18,117 +24,35 @@ 
test("CodeAreaModalComponent", async ({ page }) => { await page.mouse.up(); await page.mouse.down(); - await page.locator('//*[@id="code-input-0"]').click(); + await page.getByTestId("div-generic-node").click(); + await page.getByTestId("code-button-modal").click(); let value = await page.locator('//*[@id="codeValue"]').inputValue(); - - if ( - value != - 'def python_function(text: str) -> str: """This is a default python function that returns the input text""" return text' - ) { - expect(false).toBeTruthy(); - } - + const code = + 'def python_function(text: str) -> str:\n """This is a default python function that returns the input text"""\n return text'; + const wCode = + 'def python_function(text: str) -> st: """This is a default python function that returns the input text""" return text'; + const assertCode = + 'def python_function(text: str) -> str: """This is a default python function that returns the input text""" return text'; + await page + .locator("#CodeEditor div") + .filter({ hasText: "def python_function(text: str" }) + .nth(1) + .click(); + await page.locator("textarea").press("Control+a"); + await page.locator("textarea").fill(wCode); await page.locator('//*[@id="checkAndSaveBtn"]').click(); - - await page.getByTestId("more-options-modal").click(); - await page.getByTestId("edit-button-modal").click(); - - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showdescription"]').click(); + await page.waitForTimeout(1000); expect( - await page.locator('//*[@id="showdescription"]').isChecked() - ).toBeFalsy(); - - await page.locator('//*[@id="showname"]').click(); - expect(await page.locator('//*[@id="showname"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showreturn_direct"]').click(); - expect( - await page.locator('//*[@id="showreturn_direct"]').isChecked() - ).toBeFalsy(); - - await page.locator('//*[@id="showcode"]').click(); 
- expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="showdescription"]').click(); - expect( - await page.locator('//*[@id="showdescription"]').isChecked() + await page.getByText("invalid syntax (, line 1)").isVisible() ).toBeTruthy(); - - await page.locator('//*[@id="showname"]').click(); - expect(await page.locator('//*[@id="showname"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="showreturn_direct"]').click(); - expect( - await page.locator('//*[@id="showreturn_direct"]').isChecked() - ).toBeTruthy(); - - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showdescription"]').click(); - expect( - await page.locator('//*[@id="showdescription"]').isChecked() - ).toBeFalsy(); - - await page.locator('//*[@id="showname"]').click(); - expect(await page.locator('//*[@id="showname"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showreturn_direct"]').click(); - expect( - await page.locator('//*[@id="showreturn_direct"]').isChecked() - ).toBeFalsy(); - - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="showdescription"]').click(); - expect( - await page.locator('//*[@id="showdescription"]').isChecked() - ).toBeTruthy(); - - await page.locator('//*[@id="showname"]').click(); - expect(await page.locator('//*[@id="showname"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="showreturn_direct"]').click(); - expect( - await page.locator('//*[@id="showreturn_direct"]').isChecked() - ).toBeTruthy(); - - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="saveChangesBtn"]').click(); - - const plusButtonLocator = page.locator('//*[@id="code-input-0"]'); - 
const elementCount = await plusButtonLocator.count(); - if (elementCount === 0) { - expect(true).toBeTruthy(); - - await page.getByTestId("more-options-modal").click(); - await page.getByTestId("edit-button-modal").click(); - - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="code-area-edit0"]').click(); - - let value = await page.locator('//*[@id="codeValue"]').inputValue(); - - if ( - value != - 'def python_function(text: str) -> str: """This is a default python function that returns the input text""" return text' - ) { - expect(false).toBeTruthy(); - } - - await page.locator('//*[@id="checkAndSaveBtn"]').click(); - - await page.locator('//*[@id="saveChangesBtn"]').click(); - - await page.locator('//*[@id="code-input-0"]').click(); - } + await page.locator("textarea").press("Control+a"); + await page.locator("textarea").fill(wCode); + await page.locator("textarea").fill(code); + await page.locator('//*[@id="checkAndSaveBtn"]').click(); + expect(await page.getByText("Code is ready to run").isVisible()).toBeTruthy(); + await page.getByTestId("code-button-modal").click(); + expect(await page.locator('//*[@id="codeValue"]').inputValue()).toBe( + assertCode + ); }); diff --git a/src/frontend/tests/end-to-end/curl_api_generation.spec.ts b/src/frontend/tests/end-to-end/curl_api_generation.spec.ts new file mode 100644 index 000000000..b725badb1 --- /dev/null +++ b/src/frontend/tests/end-to-end/curl_api_generation.spec.ts @@ -0,0 +1,17 @@ +import { expect, test } from "@playwright/test"; + +test("curl_api_generation", async ({ page, context }) => { + await page.goto("/"); + await page.locator('//*[@id="new-project-btn"]').click(); + await context.grantPermissions(["clipboard-read", "clipboard-write"]); + await page.getByRole("heading", { name: "Data Ingestion" }).click(); + await page.waitForTimeout(2000); + await page.getByText("API", { exact: true }).click(); 
+ await page.getByRole("tab", { name: "cURL" }).click(); + await page.getByRole("button", { name: "Copy Code" }).click(); + const handle = await page.evaluateHandle(() => + navigator.clipboard.readText() + ); + const clipboardContent = await handle.jsonValue(); + expect(clipboardContent.length).toBeGreaterThan(0); +}); diff --git a/src/frontend/tests/onlyFront/dragAndDrop.spec.ts b/src/frontend/tests/end-to-end/dragAndDrop.spec.ts similarity index 68% rename from src/frontend/tests/onlyFront/dragAndDrop.spec.ts rename to src/frontend/tests/end-to-end/dragAndDrop.spec.ts index 09ed9b669..144835cd0 100644 --- a/src/frontend/tests/onlyFront/dragAndDrop.spec.ts +++ b/src/frontend/tests/end-to-end/dragAndDrop.spec.ts @@ -1,24 +1,17 @@ import { expect, test } from "@playwright/test"; import { readFileSync } from "fs"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(3000); + // test.setTimeout(120000); +}); test.describe("drag and drop test", () => { /// test("drop collection", async ({ page }) => { - await page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http:localhost:3000/"); + await page.goto("/"); await page.locator("span").filter({ hasText: "My Collection" }).isVisible(); // Read your file into a buffer. 
const jsonContent = readFileSync( - "tests/onlyFront/assets/collection.json", + "tests/end-to-end/assets/collection.json", "utf-8" ); @@ -42,9 +35,7 @@ test.describe("drag and drop test", () => { } ); - await page - .getByTestId("edit-flow-button-e9ac1bdc-429b-475d-ac03-d26f9a2a3210-0") - .click(); + await page.getByText("Edit Flow").first().click(); await page.waitForTimeout(2000); const genericNoda = page.getByTestId("div-generic-node"); diff --git a/src/frontend/tests/end-to-end/dropdownComponent.spec.ts b/src/frontend/tests/end-to-end/dropdownComponent.spec.ts new file mode 100644 index 000000000..a0995a3e0 --- /dev/null +++ b/src/frontend/tests/end-to-end/dropdownComponent.spec.ts @@ -0,0 +1,260 @@ +import { expect, test } from "@playwright/test"; +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(4000); + // test.setTimeout(120000); +}); +test("dropDownComponent", async ({ page }) => { + await page.goto("/"); + await page.waitForTimeout(2000); + + await page.locator('//*[@id="new-project-btn"]').click(); + await page.waitForTimeout(2000); + + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + + await page.getByPlaceholder("Search").click(); + await page.getByPlaceholder("Search").fill("amazon"); + + await page.waitForTimeout(2000); + + await page + .getByTestId("model_specsAmazon Bedrock") + .first() + .dragTo(page.locator('//*[@id="react-flow-id"]')); + await page.mouse.up(); + await page.mouse.down(); + + await page.getByTestId("dropdown-model_id-display").click(); + await page.getByTestId("ai21.j2-grande-instruct-0-option").click(); + + let value = await page.getByTestId("dropdown-model_id-display").innerText(); + if (value !== "ai21.j2-grande-instruct") { + expect(false).toBeTruthy(); + } + + await page.getByTestId("dropdown-model_id-display").click(); + await page.getByTestId("ai21.j2-jumbo-instruct-1-option").click(); + + value = await page.getByTestId("dropdown-model_id-display").innerText(); + if 
(value !== "ai21.j2-jumbo-instruct") { + expect(false).toBeTruthy(); + } + + await page.getByTestId("more-options-modal").click(); + await page.getByTestId("edit-button-modal").click(); + + value = await page.getByTestId("dropdown-edit-model_id-display").innerText(); + if (value !== "ai21.j2-jumbo-instruct") { + expect(false).toBeTruthy(); + } + + await page.locator('//*[@id="showcache"]').click(); + expect(await page.locator('//*[@id="showcache"]').isChecked()).toBeFalsy(); + + await page.locator('//*[@id="showcache"]').click(); + expect(await page.locator('//*[@id="showcache"]').isChecked()).toBeTruthy(); + + await page.locator('//*[@id="showcredentials_profile_name"]').click(); + expect( + await page.locator('//*[@id="showcredentials_profile_name"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showcredentials_profile_name"]').click(); + expect( + await page.locator('//*[@id="showcredentials_profile_name"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showendpoint_url"]').click(); + expect( + await page.locator('//*[@id="showendpoint_url"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showendpoint_url"]').click(); + expect( + await page.locator('//*[@id="showendpoint_url"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showregion_name"]').click(); + expect( + await page.locator('//*[@id="showregion_name"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showregion_name"]').click(); + expect( + await page.locator('//*[@id="showregion_name"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showstreaming"]').click(); + expect( + await 
page.locator('//*[@id="showstreaming"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showstreaming"]').click(); + expect( + await page.locator('//*[@id="showstreaming"]').isChecked() + ).toBeTruthy(); + + // showmodel_id + await page.locator('//*[@id="showmodel_id"]').click(); + expect(await page.locator('//*[@id="showmodel_id"]').isChecked()).toBeFalsy(); + + // showmodel_id + await page.locator('//*[@id="showmodel_id"]').click(); + expect( + await page.locator('//*[@id="showmodel_id"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showcache"]').click(); + expect(await page.locator('//*[@id="showcache"]').isChecked()).toBeFalsy(); + + await page.locator('//*[@id="showcache"]').click(); + expect(await page.locator('//*[@id="showcache"]').isChecked()).toBeTruthy(); + + await page.locator('//*[@id="showcredentials_profile_name"]').click(); + expect( + await page.locator('//*[@id="showcredentials_profile_name"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showcredentials_profile_name"]').click(); + expect( + await page.locator('//*[@id="showcredentials_profile_name"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showendpoint_url"]').click(); + expect( + await page.locator('//*[@id="showendpoint_url"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showendpoint_url"]').click(); + expect( + await page.locator('//*[@id="showendpoint_url"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showregion_name"]').click(); + expect( + await page.locator('//*[@id="showregion_name"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showregion_name"]').click(); 
+ expect( + await page.locator('//*[@id="showregion_name"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showstreaming"]').click(); + expect( + await page.locator('//*[@id="showstreaming"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showstreaming"]').click(); + expect( + await page.locator('//*[@id="showstreaming"]').isChecked() + ).toBeTruthy(); + + // showmodel_id + await page.locator('//*[@id="showmodel_id"]').click(); + expect(await page.locator('//*[@id="showmodel_id"]').isChecked()).toBeFalsy(); + + // showmodel_id + await page.locator('//*[@id="showmodel_id"]').click(); + expect( + await page.locator('//*[@id="showmodel_id"]').isChecked() + ).toBeTruthy(); + + await page.getByTestId("dropdown-edit-model_id-display").click(); + await page.getByTestId("ai21.j2-ultra-v1-5-option").click(); + + value = await page.getByTestId("dropdown-edit-model_id-display").innerText(); + if (value !== "ai21.j2-ultra-v1") { + expect(false).toBeTruthy(); + } + + await page.locator('//*[@id="saveChangesBtn"]').click(); + + value = await page.getByTestId("dropdown-model_id-display").innerText(); + if (value !== "ai21.j2-ultra-v1") { + expect(false).toBeTruthy(); + } + await page.getByTestId("code-button-modal").click(); + await page + .locator("#CodeEditor div") + .filter({ hasText: "from typing import" }) + .nth(1) + .click(); + await page.locator("textarea").press("Control+a"); + const emptyOptionsCode = `from typing import Optional +from langchain.llms.base import BaseLLM +from langchain_community.llms.bedrock import Bedrock + +from langflow.interface.custom.custom_component import CustomComponent + + +class AmazonBedrockComponent(CustomComponent): + display_name: str = "Amazon Bedrock" + description: str = "LLM model from Amazon Bedrock." 
+ icon = "Amazon" + + def build_config(self): + return { + "model_id": { + "display_name": "Model Id", + "options": [], + }, + "credentials_profile_name": {"display_name": "Credentials Profile Name"}, + "streaming": {"display_name": "Streaming", "field_type": "bool"}, + "endpoint_url": {"display_name": "Endpoint URL"}, + "region_name": {"display_name": "Region Name"}, + "model_kwargs": {"display_name": "Model Kwargs"}, + "cache": {"display_name": "Cache"}, + "code": {"advanced": True}, + } + + def build( + self, + model_id: str = "anthropic.claude-instant-v1", + credentials_profile_name: Optional[str] = None, + region_name: Optional[str] = None, + model_kwargs: Optional[dict] = None, + endpoint_url: Optional[str] = None, + streaming: bool = False, + cache: Optional[bool] = None, + ) -> BaseLLM: + try: + output = Bedrock( + credentials_profile_name=credentials_profile_name, + model_id=model_id, + region_name=region_name, + model_kwargs=model_kwargs, + endpoint_url=endpoint_url, + streaming=streaming, + cache=cache, + ) # type: ignore + except Exception as e: + raise ValueError("Could not connect to AmazonBedrock API.") from e + return output + + `; + await page.locator("textarea").fill(emptyOptionsCode); + await page.getByRole("button", { name: "Check & Save" }).click(); + await page.getByText("No parameters are available for display.").isVisible(); +}); diff --git a/src/frontend/tests/onlyFront/floatComponent.spec.ts b/src/frontend/tests/end-to-end/floatComponent.spec.ts similarity index 91% rename from src/frontend/tests/onlyFront/floatComponent.spec.ts rename to src/frontend/tests/end-to-end/floatComponent.spec.ts index e3b55cbad..2e0a2a8ad 100644 --- a/src/frontend/tests/onlyFront/floatComponent.spec.ts +++ b/src/frontend/tests/end-to-end/floatComponent.spec.ts @@ -1,29 +1,25 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(5000); + // test.setTimeout(120000); +}); test("FloatComponent", 
async ({ page }) => { - await page.routeFromHAR("harFiles/backend_12112023.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("llamacpp"); await page.waitForTimeout(2000); await page - .locator('//*[@id="llmsLlamaCpp"]') + .locator('//*[@id="model_specsLlamaCpp"]') .dragTo(page.locator('//*[@id="react-flow-id"]')); await page.mouse.up(); await page.mouse.down(); @@ -102,18 +98,6 @@ test("FloatComponent", async ({ page }) => { await page.locator('//*[@id="showmax_tokens"]').isChecked() ).toBeTruthy(); - // showmetadata - await page.locator('//*[@id="showmetadata"]').click(); - expect( - await page.locator('//*[@id="showmetadata"]').isChecked() - ).toBeTruthy(); - - // showmodel_kwargs - await page.locator('//*[@id="showmodel_kwargs"]').click(); - expect( - await page.locator('//*[@id="showmodel_kwargs"]').isChecked() - ).toBeTruthy(); - // showmodel_path await page.locator('//*[@id="showmodel_path"]').click(); expect( @@ -271,16 +255,6 @@ test("FloatComponent", async ({ page }) => { await page.locator('//*[@id="showmax_tokens"]').isChecked() ).toBeFalsy(); - // showmetadata - await page.locator('//*[@id="showmetadata"]').click(); - expect(await page.locator('//*[@id="showmetadata"]').isChecked()).toBeFalsy(); - - // showmodel_kwargs - await page.locator('//*[@id="showmodel_kwargs"]').click(); - expect( - await page.locator('//*[@id="showmodel_kwargs"]').isChecked() - ).toBeFalsy(); - // showmodel_path await 
page.locator('//*[@id="showmodel_path"]').click(); expect( diff --git a/src/frontend/tests/onlyFront/flowPage.spec.ts b/src/frontend/tests/end-to-end/flowPage.spec.ts similarity index 62% rename from src/frontend/tests/onlyFront/flowPage.spec.ts rename to src/frontend/tests/end-to-end/flowPage.spec.ts index 5c5d1a412..a5e628a53 100644 --- a/src/frontend/tests/onlyFront/flowPage.spec.ts +++ b/src/frontend/tests/end-to-end/flowPage.spec.ts @@ -1,35 +1,32 @@ import { Page, test } from "@playwright/test"; +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(6000); + // test.setTimeout(120000); +}); test.describe("Flow Page tests", () => { async function goToFlowPage(page: Page) { - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.getByRole("button", { name: "New Project" }).click(); } test("save", async ({ page }) => { - await page.routeFromHAR("harFiles/backend_12112023.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("custom"); await page.waitForTimeout(2000); await page - .locator('//*[@id="custom_componentsCustomComponent"]') + .locator('//*[@id="helpersCustom Component"]') .dragTo(page.locator('//*[@id="react-flow-id"]')); await page.mouse.up(); await page.mouse.down(); diff --git a/src/frontend/tests/end-to-end/group.spec.ts b/src/frontend/tests/end-to-end/group.spec.ts new file mode 100644 index 000000000..633997915 --- /dev/null +++ 
b/src/frontend/tests/end-to-end/group.spec.ts @@ -0,0 +1,29 @@ +import { expect, test } from "@playwright/test"; + +test.describe("group node test", () => { + /// + test("group and ungroup updating values", async ({ page }) => { + await page.goto("/"); + await page.locator('//*[@id="new-project-btn"]').click(); + + await page.getByRole("heading", { name: "Data Ingestion" }).click(); + await page.waitForTimeout(2000); + await page.getByLabel("fit view").click(); + await page.keyboard.down("Control"); + await page + .getByTestId("title-OpenAIEmbeddings") + .click({ modifiers: ["Control"] }); + await page.getByTestId("title-URL").click({ modifiers: ["Control"] }); + await page + .getByTestId("title-Recursive Character Text Splitter") + .click({ modifiers: ["Control"] }); + await page.keyboard.up("Control"); + await page.getByRole("button", { name: "Group" }).click(); + await page.getByTestId(/input-collection_name_Chroma-.*/).click(); + await page.getByTestId(/input-collection_name_Chroma-.*/).fill("test"); + await page.getByTestId("title-Group").click(); + await page.keyboard.press("Control+g"); + const value = await page.getByTestId("input-collection_name").inputValue(); + expect(value).toBe("test"); + }); +}); diff --git a/src/frontend/tests/onlyFront/inputComponent.spec.ts b/src/frontend/tests/end-to-end/inputComponent.spec.ts similarity index 62% rename from src/frontend/tests/onlyFront/inputComponent.spec.ts rename to src/frontend/tests/end-to-end/inputComponent.spec.ts index 47f589059..c702da4f5 100644 --- a/src/frontend/tests/onlyFront/inputComponent.spec.ts +++ b/src/frontend/tests/end-to-end/inputComponent.spec.ts @@ -1,22 +1,18 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(8000); + // test.setTimeout(120000); +}); test("InputComponent", async ({ page }) => { - await page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await 
page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("Chroma"); @@ -28,12 +24,12 @@ test("InputComponent", async ({ page }) => { await page.mouse.up(); await page.mouse.down(); - await page.locator("#input-8").click(); + await page.getByTestId("input-collection_name").click(); await page - .locator("#input-8") + .getByTestId("input-collection_name") .fill("collection_name_test_123123123!@#$&*(&%$@"); - let value = await page.locator("#input-8").inputValue(); + let value = await page.getByTestId("input-collection_name").inputValue(); if (value != "collection_name_test_123123123!@#$&*(&%$@") { expect(false).toBeTruthy(); @@ -61,9 +57,9 @@ test("InputComponent", async ({ page }) => { await page.locator('//*[@id="showchroma_server_host"]').isChecked() ).toBeTruthy(); - await page.locator('//*[@id="showchroma_server_http_port"]').click(); + await page.locator('//*[@id="showchroma_server_port"]').click(); expect( - await page.locator('//*[@id="showchroma_server_http_port"]').isChecked() + await page.locator('//*[@id="showchroma_server_port"]').isChecked() ).toBeTruthy(); await page.locator('//*[@id="showchroma_server_ssl_enabled"]').click(); @@ -76,19 +72,11 @@ test("InputComponent", async ({ page }) => { await page.locator('//*[@id="showcollection_name"]').isChecked() ).toBeFalsy(); - await page.locator('//*[@id="showpersist"]').click(); - expect(await page.locator('//*[@id="showpersist"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showpersist_directory"]').click(); + await 
page.locator('//*[@id="showindex_directory"]').click(); expect( - await page.locator('//*[@id="showpersist_directory"]').isChecked() + await page.locator('//*[@id="showindex_directory"]').isChecked() ).toBeFalsy(); - await page.locator('//*[@id="showsearch_kwargs"]').click(); - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeTruthy(); - await page.locator('//*[@id="showchroma_server_cors_allow_origins"]').click(); expect( await page @@ -106,9 +94,9 @@ test("InputComponent", async ({ page }) => { await page.locator('//*[@id="showchroma_server_host"]').isChecked() ).toBeFalsy(); - await page.locator('//*[@id="showchroma_server_http_port"]').click(); + await page.locator('//*[@id="showchroma_server_port"]').click(); expect( - await page.locator('//*[@id="showchroma_server_http_port"]').isChecked() + await page.locator('//*[@id="showchroma_server_port"]').isChecked() ).toBeFalsy(); await page.locator('//*[@id="showchroma_server_ssl_enabled"]').click(); @@ -116,33 +104,27 @@ test("InputComponent", async ({ page }) => { await page.locator('//*[@id="showchroma_server_ssl_enabled"]').isChecked() ).toBeFalsy(); - await page.locator('//*[@id="showpersist"]').click(); - expect(await page.locator('//*[@id="showpersist"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="showpersist_directory"]').click(); + await page.locator('//*[@id="showindex_directory"]').click(); expect( - await page.locator('//*[@id="showpersist_directory"]').isChecked() + await page.locator('//*[@id="showindex_directory"]').isChecked() ).toBeTruthy(); - await page.locator('//*[@id="showsearch_kwargs"]').click(); - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeFalsy(); - - let valueEditNode = await page.locator('//*[@id="input-5"]').inputValue(); + let valueEditNode = await page + .getByTestId("input-collection_name-edit") + .inputValue(); if (valueEditNode != "collection_name_test_123123123!@#$&*(&%$@") { 
expect(false).toBeTruthy(); } - await page.locator('//*[@id="input-5"]').click(); + await page.getByTestId("input-collection_name-edit").click(); await page - .locator('//*[@id="input-5"]') - .fill("NEW_collection_name_test_123123123!@#$&*(&%$@"); + .getByTestId("input-collection_name-edit") + .fill("NEW_collection_name_test_123123123!@#$&*(&%$@ÇÇÇÀõe"); await page.locator('//*[@id="saveChangesBtn"]').click(); - const plusButtonLocator = page.locator("#input-8"); + const plusButtonLocator = page.getByTestId("input-collection_name"); const elementCount = await plusButtonLocator.count(); if (elementCount === 0) { expect(true).toBeTruthy(); @@ -159,9 +141,9 @@ test("InputComponent", async ({ page }) => { await page.locator('//*[@id="saveChangesBtn"]').click(); - let value = await page.locator("#input-8").inputValue(); + let value = await page.getByTestId("input-collection_name").inputValue(); - if (value != "NEW_collection_name_test_123123123!@#$&*(&%$@") { + if (value != "NEW_collection_name_test_123123123!@#$&*(&%$@ÇÇÇÀõe") { expect(false).toBeTruthy(); } } diff --git a/src/frontend/tests/end-to-end/intComponent.spec.ts b/src/frontend/tests/end-to-end/intComponent.spec.ts new file mode 100644 index 000000000..a802e898a --- /dev/null +++ b/src/frontend/tests/end-to-end/intComponent.spec.ts @@ -0,0 +1,203 @@ +import { expect, test } from "@playwright/test"; +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(9000); + // test.setTimeout(120000); +}); +test("IntComponent", async ({ page }) => { + await page.goto("/"); + await page.waitForTimeout(2000); + + await page.locator('//*[@id="new-project-btn"]').click(); + await page.waitForTimeout(2000); + + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + + await page.getByPlaceholder("Search").click(); + await page.getByPlaceholder("Search").fill("openai"); + + await page.waitForTimeout(2000); + + await page + .getByTestId("modelsOpenAI") + .first() + 
.dragTo(page.locator('//*[@id="react-flow-id"]')); + await page.mouse.up(); + await page.mouse.down(); + + await page.getByTestId("int-input-max_tokens").click(); + await page + .getByTestId("int-input-max_tokens") + .fill("123456789123456789123456789"); + + let value = await page.getByTestId("int-input-max_tokens").inputValue(); + + if (value != "123456789123456789123456789") { + expect(false).toBeTruthy(); + } + + await page.getByTestId("int-input-max_tokens").click(); + await page.getByTestId("int-input-max_tokens").fill("0"); + + value = await page.getByTestId("int-input-max_tokens").inputValue(); + + if (value != "0") { + expect(false).toBeTruthy(); + } + + await page.getByTestId("more-options-modal").click(); + await page.getByTestId("edit-button-modal").click(); + + value = await page.getByTestId("edit-int-input-max_tokens").inputValue(); + + if (value != "0") { + expect(false).toBeTruthy(); + } + + await page.getByTestId("edit-int-input-max_tokens").click(); + await page + .getByTestId("edit-int-input-max_tokens") + .fill("123456789123456789123456789"); + + await page.locator('//*[@id="showinput_value"]').click(); + expect( + await page.locator('//*[@id="showinput_value"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showmodel_name"]').click(); + expect( + await page.locator('//*[@id="showmodel_name"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showopenai_api_base"]').click(); + expect( + await page.locator('//*[@id="showopenai_api_base"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showopenai_api_key"]').click(); + expect( + await page.locator('//*[@id="showopenai_api_key"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showstream"]').click(); + expect(await page.locator('//*[@id="showstream"]').isChecked()).toBeFalsy(); + + await 
page.locator('//*[@id="showtemperature"]').click(); + expect( + await page.locator('//*[@id="showtemperature"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showinput_value"]').click(); + expect( + await page.locator('//*[@id="showinput_value"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showmodel_name"]').click(); + expect( + await page.locator('//*[@id="showmodel_name"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showopenai_api_base"]').click(); + expect( + await page.locator('//*[@id="showopenai_api_base"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showopenai_api_key"]').click(); + expect( + await page.locator('//*[@id="showopenai_api_key"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showstream"]').click(); + expect(await page.locator('//*[@id="showstream"]').isChecked()).toBeTruthy(); + + await page.locator('//*[@id="showtemperature"]').click(); + expect( + await page.locator('//*[@id="showtemperature"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showinput_value"]').click(); + expect( + await page.locator('//*[@id="showinput_value"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showmodel_kwargs"]').click(); + expect( + await page.locator('//*[@id="showmodel_kwargs"]').isChecked() + ).toBeTruthy(); + + await page.locator('//*[@id="showmodel_name"]').click(); + expect( + await page.locator('//*[@id="showmodel_name"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showopenai_api_base"]').click(); + expect( + await page.locator('//*[@id="showopenai_api_base"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="showopenai_api_key"]').click(); + expect( + await page.locator('//*[@id="showopenai_api_key"]').isChecked() + ).toBeFalsy(); + + await 
page.locator('//*[@id="showstream"]').click(); + expect(await page.locator('//*[@id="showstream"]').isChecked()).toBeFalsy(); + + await page.locator('//*[@id="showtemperature"]').click(); + expect( + await page.locator('//*[@id="showtemperature"]').isChecked() + ).toBeFalsy(); + + await page.locator('//*[@id="saveChangesBtn"]').click(); + + const plusButtonLocator = page.getByTestId("int-input-max_tokens"); + const elementCount = await plusButtonLocator.count(); + if (elementCount === 0) { + expect(true).toBeTruthy(); + + await page.getByTestId("more-options-modal").click(); + await page.getByTestId("edit-button-modal").click(); + + await page.locator('//*[@id="showtimeout"]').click(); + expect( + await page.locator('//*[@id="showtimeout"]').isChecked() + ).toBeTruthy(); + + const valueEditNode = await page + .getByTestId("edit-int-input-max_tokens") + .inputValue(); + + if (valueEditNode != "123456789123456789123456789") { + expect(false).toBeTruthy(); + } + + await page.locator('//*[@id="saveChangesBtn"]').click(); + await page.getByTestId("int-input-max_tokens").click(); + await page.getByTestId("int-input-max_tokens").fill("3"); + + let value = await page.getByTestId("int-input-max_tokens").inputValue(); + + if (value != "3") { + expect(false).toBeTruthy(); + } + + await page.getByTestId("int-input-max_tokens").click(); + await page.getByTestId("int-input-max_tokens").fill("-3"); + + value = await page.getByTestId("int-input-max_tokens").inputValue(); + + if (value != "0") { + expect(false).toBeTruthy(); + } + } +}); diff --git a/src/frontend/tests/onlyFront/keyPairListComponent.spec.ts b/src/frontend/tests/end-to-end/keyPairListComponent.spec.ts similarity index 55% rename from src/frontend/tests/onlyFront/keyPairListComponent.spec.ts rename to src/frontend/tests/end-to-end/keyPairListComponent.spec.ts index 96530698e..1ef6eeb2f 100644 --- a/src/frontend/tests/onlyFront/keyPairListComponent.spec.ts +++ 
b/src/frontend/tests/end-to-end/keyPairListComponent.spec.ts @@ -1,22 +1,18 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(20000); + // test.setTimeout(120000); +}); test("KeypairListComponent", async ({ page }) => { - await page.routeFromHAR("harFiles/backend_12112023.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("csv"); @@ -51,32 +47,14 @@ test("KeypairListComponent", async ({ page }) => { } await page.getByTestId("div-generic-node").click(); - await page.locator('//*[@id="keypair1"]').click(); - await page.locator('//*[@id="keypair1"]').fill("testtesttesttest1"); + await page.locator('//*[@id="keypair0"]').click(); + await page.locator('//*[@id="keypair0"]').fill("testtesttesttest1"); await page.getByTestId("div-generic-node").click(); - await page.locator('//*[@id="keypair101"]').click(); - await page.locator('//*[@id="keypair101"]').fill("testtesttesttesttesttest1"); - await page.getByTestId("div-generic-node").click(); - - await page.locator('//*[@id="plusbtn1"]').click(); - await page.getByTestId("div-generic-node").click(); - - await page.locator('//*[@id="keypair2"]').click(); - await page.locator('//*[@id="keypair2"]').fill("testtesttesttest2"); - await page.getByTestId("div-generic-node").click(); - - await page.locator('//*[@id="keypair102"]').click(); - await page.locator('//*[@id="keypair102"]').fill("testtesttesttesttesttest2"); - 
await page.getByTestId("div-generic-node").click(); - - await page.locator('//*[@id="minusbtn1"]').click(); - await page.getByTestId("div-generic-node").click(); - - const keyPairVerification = page.locator('//*[@id="keypair102"]'); + const keyPairVerification = page.locator('//*[@id="keypair100"]'); const elementKeyCount = await keyPairVerification.count(); - if (elementKeyCount === 0) { + if (elementKeyCount === 1) { expect(true).toBeTruthy(); } else { expect(false).toBeTruthy(); @@ -113,53 +91,22 @@ test("KeypairListComponent", async ({ page }) => { await page.locator('//*[@id="editNodekeypair0"]').click(); await page.locator('//*[@id="editNodekeypair0"]').fill("testtesttesttest"); - await page.locator('//*[@id="editNodekeypair100"]').click(); - await page - .locator('//*[@id="editNodekeypair100"]') - .fill("test test test test test test"); - const plusButtonLocator = page.locator('//*[@id="plusbtn0"]'); - const elementCount = await plusButtonLocator.count(); - if (elementCount > 0) { - await plusButtonLocator.click(); - } - - await page.locator('//*[@id="editNodekeypair1"]').click(); - await page.locator('//*[@id="editNodekeypair1"]').fill("testtesttesttest1"); - await page.locator('//*[@id="editNodekeypair101"]').first().click(); - await page - .locator('//*[@id="editNodekeypair101"]') - .fill("testtesttesttesttesttest1"); - await page.locator('//*[@id="editNodeplusbtn1"]').click(); - - await page.locator('//*[@id="editNodekeypair2"]').click(); - await page.locator('//*[@id="editNodekeypair2"]').fill("testtesttesttest2"); - await page.locator('//*[@id="editNodekeypair102"]').click(); - await page - .locator('//*[@id="editNodekeypair102"]') - .fill("testtesttesttesttesttest2"); - - await page.locator('//*[@id="editNodeminusbtn1"]').click(); - - const keyPairVerification = page.locator('//*[@id="editNodekeypair102"]'); + const keyPairVerification = page.locator('//*[@id="editNodekeypair0"]'); const elementKeyCount = await keyPairVerification.count(); - if 
(elementKeyCount === 0) { + if (elementKeyCount === 1) { await page.locator('//*[@id="saveChangesBtn"]').click(); await page.getByTestId("div-generic-node").click(); const key1 = await page.locator('//*[@id="keypair0"]').inputValue(); const value1 = await page.locator('//*[@id="keypair100"]').inputValue(); - const key2 = await page.locator('//*[@id="keypair1"]').inputValue(); - const value2 = await page.locator('//*[@id="keypair101"]').inputValue(); await page.getByTestId("div-generic-node").click(); if ( key1 === "testtesttesttest" && - value1 === "test test test test test test" && - key2 === "testtesttesttest2" && - value2 === "testtesttesttesttesttest2" + value1 === "test test test test test test" ) { expect(true).toBeTruthy(); } else { diff --git a/src/frontend/tests/end-to-end/langflowShortcuts.spec.ts b/src/frontend/tests/end-to-end/langflowShortcuts.spec.ts new file mode 100644 index 000000000..157fc4459 --- /dev/null +++ b/src/frontend/tests/end-to-end/langflowShortcuts.spec.ts @@ -0,0 +1,90 @@ +import { expect, test } from "@playwright/test"; +import uaParser from "ua-parser-js"; +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(11000); + // test.setTimeout(120000); +}); +test("LangflowShortcuts", async ({ page }) => { + const getUA = await page.evaluate(() => navigator.userAgent); + const userAgentInfo = uaParser(getUA); + let control = "Control"; + + if (userAgentInfo.os.name.includes("Mac")) { + control = "Meta"; + } + + await page.goto("/"); + await page.waitForTimeout(1000); + + await page.locator('//*[@id="new-project-btn"]').click(); + await page.waitForTimeout(1000); + + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(1000); + + await page.getByPlaceholder("Search").click(); + await page.getByPlaceholder("Search").fill("llamacpp"); + + await page.waitForTimeout(1000); + + await page + .locator('//*[@id="model_specsLlamaCpp"]') + .dragTo(page.locator('//*[@id="react-flow-id"]')); + await page.mouse.up(); 
+ await page.mouse.down(); + + await page.getByTestId("title-LlamaCpp").click(); + await page.keyboard.press(`${control}+e`); + await page.locator('//*[@id="saveChangesBtn"]').click(); + + await page.getByTestId("title-LlamaCpp").click(); + await page.keyboard.press(`${control}+d`); + + let numberOfNodes = await page.getByTestId("title-LlamaCpp").count(); + if (numberOfNodes != 2) { + expect(false).toBeTruthy(); + } + + await page + .locator( + '//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div[2]/div/div[1]/div/div[1]/div/div/div[1]' + ) + .click(); + await page.keyboard.press("Backspace"); + + numberOfNodes = await page.getByTestId("title-LlamaCpp").count(); + if (numberOfNodes != 1) { + expect(false).toBeTruthy(); + } + + await page.getByTestId("title-LlamaCpp").click(); + await page.keyboard.press(`${control}+c`); + + await page.getByTestId("title-LlamaCpp").click(); + await page.keyboard.press(`${control}+v`); + + numberOfNodes = await page.getByTestId("title-LlamaCpp").count(); + if (numberOfNodes != 2) { + expect(false).toBeTruthy(); + } + + await page + .locator( + '//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div[2]/div/div[1]/div/div[1]/div/div/div[1]' + ) + .click(); + await page.keyboard.press("Backspace"); + + await page.getByTestId("title-LlamaCpp").click(); + await page.keyboard.press(`${control}+x`); + + numberOfNodes = await page.getByTestId("title-LlamaCpp").count(); + if (numberOfNodes != 0) { + expect(false).toBeTruthy(); + } + await page.keyboard.press(`${control}+v`); + numberOfNodes = await page.getByTestId("title-LlamaCpp").count(); + if (numberOfNodes != 1) { + expect(false).toBeTruthy(); + } +}); diff --git a/src/frontend/tests/onlyFront/nestedComponent.spec.ts b/src/frontend/tests/end-to-end/nestedComponent.spec.ts similarity index 80% rename from src/frontend/tests/onlyFront/nestedComponent.spec.ts rename to src/frontend/tests/end-to-end/nestedComponent.spec.ts index 0a90533f4..053c72db2 100644 --- 
a/src/frontend/tests/onlyFront/nestedComponent.spec.ts +++ b/src/frontend/tests/end-to-end/nestedComponent.spec.ts @@ -1,22 +1,18 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(12000); + // test.setTimeout(120000); +}); test("NestedComponent", async ({ page }) => { - await page.routeFromHAR("harFiles/backend_12112023.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("pinecone"); @@ -31,75 +27,17 @@ test("NestedComponent", async ({ page }) => { await page.getByTestId("more-options-modal").click(); await page.getByTestId("edit-button-modal").click(); - // showindex_name - await page.locator('//*[@id="showindex_name"]').click(); + //showpool_threads + await page.locator('//*[@id="showpool_threads"]').click(); expect( - await page.locator('//*[@id="showindex_name"]').isChecked() - ).toBeFalsy(); - - // shownamespace - await page.locator('//*[@id="shownamespace"]').click(); - - expect( - await page.locator('//*[@id="shownamespace"]').isChecked() - ).toBeFalsy(); - - // showpinecone_api_key - await page.locator('//*[@id="showpinecone_api_key"]').click(); - - expect( - await page.locator('//*[@id="showpinecone_api_key"]').isChecked() + await page.locator('//*[@id="showpool_threads"]').isChecked() ).toBeTruthy(); - // showpinecone_env - await page.locator('//*[@id="showpinecone_env"]').click(); + //showtext_key + await 
page.locator('//*[@id="showtext_key"]').click(); - expect( - await page.locator('//*[@id="showpinecone_env"]').isChecked() - ).toBeTruthy(); - - // showsearch_kwargs - await page.locator('//*[@id="showsearch_kwargs"]').click(); - - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeTruthy(); - - // showindex_name - await page.locator('//*[@id="showindex_name"]').click(); - - expect( - await page.locator('//*[@id="showindex_name"]').isChecked() - ).toBeTruthy(); - - // shownamespace - await page.locator('//*[@id="shownamespace"]').click(); - - expect( - await page.locator('//*[@id="shownamespace"]').isChecked() - ).toBeTruthy(); - - // showpinecone_api_key - await page.locator('//*[@id="showpinecone_api_key"]').click(); - - expect( - await page.locator('//*[@id="showpinecone_api_key"]').isChecked() - ).toBeFalsy(); - - // showpinecone_env - await page.locator('//*[@id="showpinecone_env"]').click(); - - expect( - await page.locator('//*[@id="showpinecone_env"]').isChecked() - ).toBeFalsy(); - - // showsearch_kwargs - await page.locator('//*[@id="showsearch_kwargs"]').click(); - - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeFalsy(); + expect(await page.locator('//*[@id="showtext_key"]').isChecked()).toBeFalsy(); // showindex_name await page.locator('//*[@id="showindex_name"]').click(); @@ -120,21 +58,14 @@ test("NestedComponent", async ({ page }) => { expect( await page.locator('//*[@id="showpinecone_api_key"]').isChecked() - ).toBeTruthy(); + ).toBeFalsy(); // showpinecone_env await page.locator('//*[@id="showpinecone_env"]').click(); expect( await page.locator('//*[@id="showpinecone_env"]').isChecked() - ).toBeTruthy(); - - // showsearch_kwargs - await page.locator('//*[@id="showsearch_kwargs"]').click(); - - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeTruthy(); + ).toBeFalsy(); // showindex_name await page.locator('//*[@id="showindex_name"]').click(); @@ -155,21 
+86,14 @@ test("NestedComponent", async ({ page }) => { expect( await page.locator('//*[@id="showpinecone_api_key"]').isChecked() - ).toBeFalsy(); + ).toBeTruthy(); // showpinecone_env await page.locator('//*[@id="showpinecone_env"]').click(); expect( await page.locator('//*[@id="showpinecone_env"]').isChecked() - ).toBeFalsy(); - - // showsearch_kwargs - await page.locator('//*[@id="showsearch_kwargs"]').click(); - - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeFalsy(); + ).toBeTruthy(); // showindex_name await page.locator('//*[@id="showindex_name"]').click(); @@ -190,21 +114,14 @@ test("NestedComponent", async ({ page }) => { expect( await page.locator('//*[@id="showpinecone_api_key"]').isChecked() - ).toBeTruthy(); + ).toBeFalsy(); // showpinecone_env await page.locator('//*[@id="showpinecone_env"]').click(); expect( await page.locator('//*[@id="showpinecone_env"]').isChecked() - ).toBeTruthy(); - - // showsearch_kwargs - await page.locator('//*[@id="showsearch_kwargs"]').click(); - - expect( - await page.locator('//*[@id="showsearch_kwargs"]').isChecked() - ).toBeTruthy(); + ).toBeFalsy(); // showindex_name await page.locator('//*[@id="showindex_name"]').click(); @@ -223,6 +140,34 @@ test("NestedComponent", async ({ page }) => { // showpinecone_api_key await page.locator('//*[@id="showpinecone_api_key"]').click(); + expect( + await page.locator('//*[@id="showpinecone_api_key"]').isChecked() + ).toBeTruthy(); + + // showpinecone_env + await page.locator('//*[@id="showpinecone_env"]').click(); + + expect( + await page.locator('//*[@id="showpinecone_env"]').isChecked() + ).toBeTruthy(); + + // showindex_name + await page.locator('//*[@id="showindex_name"]').click(); + + expect( + await page.locator('//*[@id="showindex_name"]').isChecked() + ).toBeFalsy(); + + // shownamespace + await page.locator('//*[@id="shownamespace"]').click(); + + expect( + await page.locator('//*[@id="shownamespace"]').isChecked() + ).toBeFalsy(); + + // 
showpinecone_api_key + await page.locator('//*[@id="showpinecone_api_key"]').click(); + expect( await page.locator('//*[@id="showpinecone_api_key"]').isChecked() ).toBeFalsy(); @@ -234,7 +179,47 @@ test("NestedComponent", async ({ page }) => { await page.locator('//*[@id="showpinecone_env"]').isChecked() ).toBeFalsy(); + // showindex_name + await page.locator('//*[@id="showindex_name"]').click(); + + expect( + await page.locator('//*[@id="showindex_name"]').isChecked() + ).toBeTruthy(); + + // shownamespace + await page.locator('//*[@id="shownamespace"]').click(); + + expect( + await page.locator('//*[@id="shownamespace"]').isChecked() + ).toBeTruthy(); + + // showpinecone_api_key + await page.locator('//*[@id="showpinecone_api_key"]').click(); + + expect( + await page.locator('//*[@id="showpinecone_api_key"]').isChecked() + ).toBeTruthy(); + + // showpinecone_env + await page.locator('//*[@id="showpinecone_env"]').click(); + + expect( + await page.locator('//*[@id="showpinecone_env"]').isChecked() + ).toBeTruthy(); + + //showpool_threads + await page.locator('//*[@id="showpool_threads"]').click(); + + expect( + await page.locator('//*[@id="showpool_threads"]').isChecked() + ).toBeFalsy(); + + //showtext_key + await page.locator('//*[@id="showtext_key"]').click(); + + expect( + await page.locator('//*[@id="showtext_key"]').isChecked() + ).toBeTruthy(); + await page.locator('//*[@id="saveChangesBtn"]').click(); - - await page.getByTestId("div-dict-input").click(); }); diff --git a/src/frontend/tests/end-to-end/promptModalComponent.spec.ts b/src/frontend/tests/end-to-end/promptModalComponent.spec.ts index 690ec2d05..f0d118094 100644 --- a/src/frontend/tests/end-to-end/promptModalComponent.spec.ts +++ b/src/frontend/tests/end-to-end/promptModalComponent.spec.ts @@ -1,32 +1,40 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(13000); + // test.setTimeout(120000); +}); 
test("PromptTemplateComponent", async ({ page }) => { - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); - await page.getByPlaceholder("Search").fill("promptTemplate"); + await page.getByPlaceholder("Search").fill("prompt"); await page.waitForTimeout(2000); await page - .locator('//*[@id="promptsPromptTemplate"]') + .locator('//*[@id="inputsPrompt"]') .dragTo(page.locator('//*[@id="react-flow-id"]')); await page.mouse.up(); await page.mouse.down(); - await page.getByTestId("prompt-input-0").click(); + await page.getByTestId("prompt-input-template").click(); // await page.getByTestId("edit-prompt-sanitized").click(); // await page.getByTestId("modal-title").click(); await page - .getByTestId("modal-prompt-input-0") + .getByTestId("modal-prompt-input-template") .fill("{prompt} example {prompt1}"); - let value = await page.getByTestId("modal-prompt-input-0").inputValue(); + let value = await page + .getByTestId("modal-prompt-input-template") + .inputValue(); if (value != "{prompt} example {prompt1}") { expect(false).toBeTruthy(); @@ -45,35 +53,38 @@ test("PromptTemplateComponent", async ({ page }) => { await page.getByTestId("genericModalBtnSave").click(); await page.getByTestId("div-textarea-prompt").click(); - await page.getByTestId("text-area-modal").fill("prompt_value_!@#!@#"); + await page.getByTestId("textarea-prompt").fill("prompt_value_!@#!@#"); - value = await page.getByTestId("text-area-modal").inputValue(); + value = await page.getByTestId("textarea-prompt").inputValue(); if (value != "prompt_value_!@#!@#") { expect(false).toBeTruthy(); } - await page.getByTestId("genericModalBtnSave").click(); + await page.getByTestId("save-button-modal").click(); + + const replace = await 
page.getByTestId("replace-button"); + if (replace) { + await page.getByTestId("replace-button").click(); + } await page.getByTestId("div-textarea-prompt1").click(); await page - .getByTestId("text-area-modal") + .getByTestId("textarea-prompt1") .fill("prompt_name_test_123123!@#!@#"); - value = await page.getByTestId("text-area-modal").inputValue(); + value = await page.getByTestId("textarea-prompt1").inputValue(); if (value != "prompt_name_test_123123!@#!@#") { expect(false).toBeTruthy(); } - value = await page.getByTestId("text-area-modal").inputValue(); + value = await page.getByTestId("textarea-prompt1").inputValue(); if (value != "prompt_name_test_123123!@#!@#") { expect(false).toBeTruthy(); } - await page.getByTestId("genericModalBtnSave").click(); - await page.getByTestId("more-options-modal").click(); await page.getByTestId("edit-button-modal").click(); diff --git a/src/frontend/tests/end-to-end/python_api_generation.spec.ts b/src/frontend/tests/end-to-end/python_api_generation.spec.ts new file mode 100644 index 000000000..2d7a6af0d --- /dev/null +++ b/src/frontend/tests/end-to-end/python_api_generation.spec.ts @@ -0,0 +1,17 @@ +import { expect, test } from "@playwright/test"; + +test("python_api_generation", async ({ page, context }) => { + await page.goto("/"); + await page.locator('//*[@id="new-project-btn"]').click(); + await context.grantPermissions(["clipboard-read", "clipboard-write"]); + await page.getByRole("heading", { name: "Data Ingestion" }).click(); + await page.waitForTimeout(2000); + await page.getByText("API", { exact: true }).click(); + await page.getByRole("tab", { name: "Python API" }).click(); + await page.getByRole("button", { name: "Copy Code" }).click(); + const handle = await page.evaluateHandle(() => + navigator.clipboard.readText() + ); + const clipboardContent = await handle.jsonValue(); + expect(clipboardContent.length).toBeGreaterThan(0); +}); diff --git a/src/frontend/tests/onlyFront/saveComponents.spec.ts 
b/src/frontend/tests/end-to-end/saveComponents.spec.ts similarity index 73% rename from src/frontend/tests/onlyFront/saveComponents.spec.ts rename to src/frontend/tests/end-to-end/saveComponents.spec.ts index 10a0c9c77..772894b5a 100644 --- a/src/frontend/tests/onlyFront/saveComponents.spec.ts +++ b/src/frontend/tests/end-to-end/saveComponents.spec.ts @@ -1,6 +1,9 @@ import { Page, expect, test } from "@playwright/test"; import { readFileSync } from "fs"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(14000); + // test.setTimeout(120000); +}); test.describe("save component tests", () => { async function saveComponent(page: Page, pattern: RegExp, n: number) { for (let i = 0; i < n; i++) { @@ -11,22 +14,15 @@ test.describe("save component tests", () => { /// test("save group component tests", async ({ page }) => { - //make front work withoput backend - await page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http:localhost:3000/"); - await page.locator("span").filter({ hasText: "My Collection" }).isVisible(); + await page.goto("/"); + await page.locator('//*[@id="new-project-btn"]').click(); + + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + // Read your file into a buffer. 
const jsonContent = readFileSync( - "tests/onlyFront/assets/collection.json", + "tests/end-to-end/assets/flow_group_test.json", "utf-8" ); @@ -34,32 +30,32 @@ test.describe("save component tests", () => { const dataTransfer = await page.evaluateHandle((data) => { const dt = new DataTransfer(); // Convert the buffer to a hex array - const file = new File([data], "flowtest.json", { + const file = new File([data], "flow_group_test.json", { type: "application/json", }); dt.items.add(file); return dt; }, jsonContent); + page.waitForTimeout(2000); + // Now dispatch await page.dispatchEvent( - '//*[@id="root"]/div/div[1]/div[2]/div[3]/div/div', + "//*[@id='react-flow-id']/div[1]/div[1]/div", "drop", { dataTransfer, } ); - await page - .getByTestId("edit-flow-button-e9ac1bdc-429b-475d-ac03-d26f9a2a3210-0") - .click(); - await page.waitForTimeout(2000); - const genericNoda = page.getByTestId("div-generic-node"); const elementCount = await genericNoda.count(); if (elementCount > 0) { expect(true).toBeTruthy(); } + await page + .locator('//*[@id="react-flow-id"]/div[1]/div[2]/button[3]') + .click(); await page.getByTestId("title-PythonFunctionTool").click({ modifiers: ["Control"], @@ -68,7 +64,7 @@ test.describe("save component tests", () => { modifiers: ["Control"], }); - await page.getByTestId("title-AgentInitializer").click({ + await page.getByTestId("title-Agent Initializer").click({ modifiers: ["Control"], }); @@ -87,9 +83,13 @@ test.describe("save component tests", () => { } await page.getByTestId("title-Group").click(); - await page.getByTestId("more-options-modal").click(); - await page.getByTestId("save-button-modal").click(); - await page.getByTestId("delete-button-modal").click(); + await page.getByTestId("icon-SaveAll").click(); + + const replaceButton = page.getByTestId("replace-button"); + + if (replaceButton) { + await replaceButton.click(); + } await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("group"); diff --git 
a/src/frontend/tests/onlyFront/toggleComponent.spec.ts b/src/frontend/tests/end-to-end/toggleComponent.spec.ts similarity index 67% rename from src/frontend/tests/onlyFront/toggleComponent.spec.ts rename to src/frontend/tests/end-to-end/toggleComponent.spec.ts index cdf360c57..cb29b6627 100644 --- a/src/frontend/tests/onlyFront/toggleComponent.spec.ts +++ b/src/frontend/tests/end-to-end/toggleComponent.spec.ts @@ -1,22 +1,18 @@ import { expect, test } from "@playwright/test"; - +test.beforeEach(async ({ page }) => { + // await page.waitForTimeout(15000); + // test.setTimeout(120000); +}); test("ToggleComponent", async ({ page }) => { - await page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http://localhost:3000/"); + await page.goto("/"); await page.waitForTimeout(2000); await page.locator('//*[@id="new-project-btn"]').click(); await page.waitForTimeout(2000); + await page.getByTestId("blank-flow").click(); + await page.waitForTimeout(2000); + await page.getByPlaceholder("Search").click(); await page.getByPlaceholder("Search").fill("directoryLoader"); @@ -38,29 +34,27 @@ test("ToggleComponent", async ({ page }) => { await page.locator('//*[@id="saveChangesBtn"]').click(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeFalsy(); + await page.getByTestId("toggle-load_hidden").click(); + expect(await page.getByTestId("toggle-load_hidden").isChecked()).toBeFalsy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeTruthy(); + await page.getByTestId("toggle-load_hidden").click(); + expect(await page.getByTestId("toggle-load_hidden").isChecked()).toBeTruthy(); - await 
page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeFalsy(); + await page.getByTestId("toggle-load_hidden").click(); + expect(await page.getByTestId("toggle-load_hidden").isChecked()).toBeFalsy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeTruthy(); + await page.getByTestId("toggle-load_hidden").click(); + expect(await page.getByTestId("toggle-load_hidden").isChecked()).toBeTruthy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeFalsy(); + await page.getByTestId("toggle-load_hidden").click(); + expect(await page.getByTestId("toggle-load_hidden").isChecked()).toBeFalsy(); await page.getByTestId("div-generic-node").click(); await page.getByTestId("more-options-modal").click(); await page.getByTestId("edit-button-modal").click(); - expect( - await page.locator('//*[@id="toggle-edit-1"]').isChecked() - ).toBeFalsy(); + expect(await page.getByTestId("toggle-load_hidden").isChecked()).toBeFalsy(); await page.locator('//*[@id="showglob"]').click(); expect(await page.locator('//*[@id="showglob"]').isChecked()).toBeFalsy(); @@ -129,7 +123,7 @@ test("ToggleComponent", async ({ page }) => { await page.locator('//*[@id="saveChangesBtn"]').click(); - const plusButtonLocator = page.locator('//*[@id="toggle-1"]'); + const plusButtonLocator = page.getByTestId("toggle-load_hidden"); const elementCount = await plusButtonLocator.count(); if (elementCount === 0) { expect(true).toBeTruthy(); @@ -145,24 +139,34 @@ test("ToggleComponent", async ({ page }) => { ).toBeTruthy(); expect( - await page.locator('//*[@id="toggle-edit-1"]').isChecked() + await page.getByTestId("toggle-edit-load_hidden").isChecked() ).toBeFalsy(); await page.locator('//*[@id="saveChangesBtn"]').click(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await 
page.locator('//*[@id="toggle-1"]').isChecked()).toBeTruthy(); + await page.getByTestId("toggle-load_hidden").click(); + expect( + await page.getByTestId("toggle-load_hidden").isChecked() + ).toBeTruthy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeFalsy(); + await page.getByTestId("toggle-load_hidden").click(); + expect( + await page.getByTestId("toggle-load_hidden").isChecked() + ).toBeFalsy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeTruthy(); + await page.getByTestId("toggle-load_hidden").click(); + expect( + await page.getByTestId("toggle-load_hidden").isChecked() + ).toBeTruthy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeFalsy(); + await page.getByTestId("toggle-load_hidden").click(); + expect( + await page.getByTestId("toggle-load_hidden").isChecked() + ).toBeFalsy(); - await page.locator('//*[@id="toggle-1"]').click(); - expect(await page.locator('//*[@id="toggle-1"]').isChecked()).toBeTruthy(); + await page.getByTestId("toggle-load_hidden").click(); + expect( + await page.getByTestId("toggle-load_hidden").isChecked() + ).toBeTruthy(); } }); diff --git a/src/frontend/tests/end-to-end/tweaks_test.spec.ts b/src/frontend/tests/end-to-end/tweaks_test.spec.ts new file mode 100644 index 000000000..acfe6f1cb --- /dev/null +++ b/src/frontend/tests/end-to-end/tweaks_test.spec.ts @@ -0,0 +1,38 @@ +import { expect, test } from "@playwright/test"; + +test("curl_api_generation", async ({ page, context }) => { + await page.goto("/"); + await page.locator('//*[@id="new-project-btn"]').click(); + await context.grantPermissions(["clipboard-read", "clipboard-write"]); + await page.getByRole("heading", { name: "Data Ingestion" }).click(); + await page.waitForTimeout(2000); + await page.getByText("API", { exact: true }).click(); + await 
page.getByRole("tab", { name: "cURL" }).click(); + await page.getByRole("button", { name: "Copy Code" }).click(); + const handle = await page.evaluateHandle(() => + navigator.clipboard.readText() + ); + const clipboardContent = await handle.jsonValue(); + const oldValue = clipboardContent; + expect(clipboardContent.length).toBeGreaterThan(0); + await page.getByRole("tab", { name: "Tweaks" }).click(); + await page + .getByRole("heading", { name: "URL" }) + .locator("div") + .first() + .click(); + await page.getByRole("textbox", { name: "Type something..." }).click(); + await page + .getByRole("textbox", { name: "Type something..." }) + .press("Control+a"); + await page.getByRole("textbox", { name: "Type something..." }).fill("teste"); + await page.getByRole("tab", { name: "cURL" }).click(); + await page.getByRole("button", { name: "Copy Code" }).click(); + const handle2 = await page.evaluateHandle(() => + navigator.clipboard.readText() + ); + const clipboardContent2 = await handle2.jsonValue(); + const newValue = clipboardContent2; + expect(oldValue).not.toBe(newValue); + expect(clipboardContent2.length).toBeGreaterThan(clipboardContent.length); +}); diff --git a/src/frontend/tests/globalTeardown.ts b/src/frontend/tests/globalTeardown.ts new file mode 100644 index 000000000..110978d42 --- /dev/null +++ b/src/frontend/tests/globalTeardown.ts @@ -0,0 +1,25 @@ +// tests/globalTeardown.ts + +import fs from "fs"; +import path from "path"; + +export default async () => { + try { + console.log("Removing the temp database"); + // Check if the file exists in the path + // this file is in src/frontend/tests/globalTeardown.ts + // temp is in src/frontend/temp + const tempDbPath = path.join(__dirname, "..", "temp"); + console.log("tempDbPath", tempDbPath); + // Remove the temp database + fs.rmSync(tempDbPath); + // Check if the file is removed + if (!fs.existsSync(tempDbPath)) { + console.log("Successfully removed the temp database"); + } else { + console.error("Error while 
removing the temp database"); + } + } catch (error) { + console.error("Error while removing the temp database:", error); + } +}; diff --git a/src/frontend/tests/onlyFront/dropdownComponent.spec.ts b/src/frontend/tests/onlyFront/dropdownComponent.spec.ts deleted file mode 100644 index ea397e68d..000000000 --- a/src/frontend/tests/onlyFront/dropdownComponent.spec.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { expect, test } from "@playwright/test"; - -test("dropDownComponent", async ({ page }) => { - await page.routeFromHAR("harFiles/backend_12112023.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - - await page.goto("http://localhost:3000/"); - await page.waitForTimeout(2000); - - await page.locator('//*[@id="new-project-btn"]').click(); - await page.waitForTimeout(2000); - - await page.getByPlaceholder("Search").click(); - await page.getByPlaceholder("Search").fill("amazon"); - - await page.waitForTimeout(2000); - - await page - .getByTestId("llmsAmazon Bedrock") - .first() - .dragTo(page.locator('//*[@id="react-flow-id"]')); - await page.mouse.up(); - await page.mouse.down(); - - await page.getByTestId("dropdown-2-display").click(); - await page.getByTestId("ai21.j2-grande-instruct-0-option").click(); - - let value = await page.getByTestId("dropdown-2-display").innerText(); - if (value !== "ai21.j2-grande-instruct") { - expect(false).toBeTruthy(); - } - - await page.getByTestId("dropdown-2-display").click(); - await page.getByTestId("ai21.j2-jumbo-instruct-1-option").click(); - - value = await page.getByTestId("dropdown-2-display").innerText(); - if (value !== "ai21.j2-jumbo-instruct") { - expect(false).toBeTruthy(); - } - - await page.getByTestId("more-options-modal").click(); - await page.getByTestId("edit-button-modal").click(); - - value = await 
page.getByTestId("dropdown-edit-1-display").innerText(); - if (value !== "ai21.j2-jumbo-instruct") { - expect(false).toBeTruthy(); - } - - // showcode - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeFalsy(); - - // showmodel_id - await page.locator('//*[@id="showmodel_id"]').click(); - expect(await page.locator('//*[@id="showmodel_id"]').isChecked()).toBeFalsy(); - - // showcode - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeTruthy(); - - // showmodel_id - await page.locator('//*[@id="showmodel_id"]').click(); - expect( - await page.locator('//*[@id="showmodel_id"]').isChecked() - ).toBeTruthy(); - - // showcode - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeFalsy(); - - // showmodel_id - await page.locator('//*[@id="showmodel_id"]').click(); - expect(await page.locator('//*[@id="showmodel_id"]').isChecked()).toBeFalsy(); - - // showcode - await page.locator('//*[@id="showcode"]').click(); - expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeTruthy(); - - // showmodel_id - await page.locator('//*[@id="showmodel_id"]').click(); - expect( - await page.locator('//*[@id="showmodel_id"]').isChecked() - ).toBeTruthy(); - - await page.getByTestId("dropdown-edit-1-display").click(); - await page.getByTestId("ai21.j2-ultra-v1-5-option").click(); - - value = await page.getByTestId("dropdown-edit-1-display").innerText(); - if (value !== "ai21.j2-ultra-v1") { - expect(false).toBeTruthy(); - } - - await page.locator('//*[@id="saveChangesBtn"]').click(); - - value = await page.getByTestId("dropdown-2-display").innerText(); - if (value !== "ai21.j2-ultra-v1") { - expect(false).toBeTruthy(); - } -}); diff --git a/src/frontend/tests/onlyFront/group.spec.ts b/src/frontend/tests/onlyFront/group.spec.ts deleted file mode 100644 index 
dcd557279..000000000 --- a/src/frontend/tests/onlyFront/group.spec.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { expect, test } from "@playwright/test"; -import { readFileSync } from "fs"; - -test.describe("group node test", () => { - /// - test("group and ungroup updating values", async ({ page }) => { - await page.routeFromHAR("harFiles/langflow.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - await page.goto("http:localhost:3000/"); - await page.locator("span").filter({ hasText: "My Collection" }).isVisible(); - // Read your file into a buffer. - const jsonContent = readFileSync( - "tests/onlyFront/assets/collection.json", - "utf-8" - ); - - // Create the DataTransfer and File - const dataTransfer = await page.evaluateHandle((data) => { - const dt = new DataTransfer(); - // Convert the buffer to a hex array - const file = new File([data], "flowtest.json", { - type: "application/json", - }); - dt.items.add(file); - return dt; - }, jsonContent); - - page.waitForTimeout(2000); - - await page.dispatchEvent( - '//*[@id="root"]/div/div[1]/div[2]/div[3]/div/div', - "drop", - { - dataTransfer, - } - ); - - await page - .getByTestId("edit-flow-button-e9ac1bdc-429b-475d-ac03-d26f9a2a3210-0") - .click(); - await page.waitForTimeout(2000); - - const genericNoda = page.getByTestId("div-generic-node"); - const elementCount = await genericNoda.count(); - if (elementCount > 0) { - expect(true).toBeTruthy(); - } - - await page.getByTestId("title-PythonFunctionTool").click({ - modifiers: ["Control"], - }); - await page.getByTestId("title-ChatOpenAI").click({ - modifiers: ["Control"], - }); - - await page.getByTestId("title-AgentInitializer").click({ - modifiers: ["Control"], - }); - - await page.getByRole("button", { name: "Group" }).click(); - - const textArea = 
page.getByTestId("div-textarea-description"); - const elementCountText = await textArea.count(); - if (elementCountText > 0) { - expect(true).toBeTruthy(); - } - - const groupNode = page.getByTestId("title-Group"); - const elementGroup = await groupNode.count(); - if (elementGroup > 0) { - expect(true).toBeTruthy(); - } - - // Now dispatch - }); -}); diff --git a/src/frontend/tests/onlyFront/intComponent.spec.ts b/src/frontend/tests/onlyFront/intComponent.spec.ts deleted file mode 100644 index be94d82e8..000000000 --- a/src/frontend/tests/onlyFront/intComponent.spec.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { expect, test } from "@playwright/test"; - -test("IntComponent", async ({ page }) => { - await page.routeFromHAR("harFiles/backend_12112023.har", { - url: "**/api/v1/**", - update: false, - }); - await page.route("**/api/v1/flows/", async (route) => { - const json = { - id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - }; - await route.fulfill({ json, status: 201 }); - }); - - await page.goto("http://localhost:3000/"); - await page.waitForTimeout(2000); - - await page.locator('//*[@id="new-project-btn"]').click(); - await page.waitForTimeout(2000); - - await page.getByPlaceholder("Search").click(); - await page.getByPlaceholder("Search").fill("getrequest"); - - await page.waitForTimeout(2000); - - await page - .getByTestId("utilitiesGET Request") - .first() - .dragTo(page.locator('//*[@id="react-flow-id"]')); - await page.mouse.up(); - await page.mouse.down(); - - await page.locator('//*[@id="int-input-2"]').click(); - await page - .locator('//*[@id="int-input-2"]') - .fill("123456789123456789123456789"); - - let value = await page.locator('//*[@id="int-input-2"]').inputValue(); - - if (value != "123456789123456789123456789") { - expect(false).toBeTruthy(); - } - - await page.locator('//*[@id="int-input-2"]').click(); - await page.locator('//*[@id="int-input-2"]').fill("0"); - - value = await page.locator('//*[@id="int-input-2"]').inputValue(); - - if (value != 
"0") { - expect(false).toBeTruthy(); - } - - await page.getByTestId("more-options-modal").click(); - await page.getByTestId("edit-button-modal").click(); - - value = await page.locator('//*[@id="edit-int-input-2"]').inputValue(); - - if (value != "0") { - expect(false).toBeTruthy(); - } - - await page.locator('//*[@id="edit-int-input-2"]').click(); - await page - .locator('//*[@id="edit-int-input-2"]') - .fill("123456789123456789123456789"); - - await page.locator('//*[@id="showheaders"]').click(); - expect(await page.locator('//*[@id="showheaders"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showtimeout"]').click(); - expect(await page.locator('//*[@id="showtimeout"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showurl"]').click(); - expect(await page.locator('//*[@id="showurl"]').isChecked()).toBeFalsy(); - - await page.locator('//*[@id="showheaders"]').click(); - expect(await page.locator('//*[@id="showheaders"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="showurl"]').click(); - expect(await page.locator('//*[@id="showurl"]').isChecked()).toBeTruthy(); - - await page.locator('//*[@id="saveChangesBtn"]').click(); - - const plusButtonLocator = page.locator('//*[@id="int-input-2"]'); - const elementCount = await plusButtonLocator.count(); - if (elementCount === 0) { - expect(true).toBeTruthy(); - - await page.getByTestId("more-options-modal").click(); - await page.getByTestId("edit-button-modal").click(); - - await page.locator('//*[@id="showtimeout"]').click(); - expect( - await page.locator('//*[@id="showtimeout"]').isChecked() - ).toBeTruthy(); - - const valueEditNode = await page - .locator('//*[@id="edit-int-input-2"]') - .inputValue(); - - if (valueEditNode != "123456789123456789123456789") { - expect(false).toBeTruthy(); - } - - await page.locator('//*[@id="saveChangesBtn"]').click(); - await page.locator('//*[@id="int-input-2"]').click(); - await page.locator('//*[@id="int-input-2"]').fill("3"); - - let 
value = await page.locator('//*[@id="int-input-2"]').inputValue(); - - if (value != "3") { - expect(false).toBeTruthy(); - } - - await page.locator('//*[@id="int-input-2"]').click(); - await page.locator('//*[@id="int-input-2"]').fill("-3"); - - value = await page.locator('//*[@id="int-input-2"]').inputValue(); - - if (value != "0") { - expect(false).toBeTruthy(); - } - } -}); diff --git a/src/frontend/tests/onlyFront/login.spec.ts b/src/frontend/tests/onlyFront/login.spec.ts deleted file mode 100644 index 119d00a37..000000000 --- a/src/frontend/tests/onlyFront/login.spec.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { test } from "@playwright/test"; - -test.describe("Login Tests", () => { - test("Login_Success", async ({ page }) => { - // await page.route("**/api/v1/login", async (route) => { - // const json = { - // access_token: - // "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhMWNlM2FkOS1iZTE2LTRiNjgtOGRhYi1hYjA4YTVjMmZjZTkiLCJleHAiOjE2OTUyNTIwNTh9.MBYFwMhTcZnsW_L7p4qavUhSDylCllJQWUCJdU1wX8o", - // refresh_token: - // "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhMWNlM2FkOS1iZTE2LTRiNjgtOGRhYi1hYjA4YTVjMmZjZTkiLCJ0eXBlIjoicmYiLCJleHAiOjE2OTUyNTI2NTh9.a4wL9-XK_zyTyrXduBFgCsODFXrqiByVr5HOeiCbiQA", - // token_type: "bearer", - // }; - // await route.fulfill({ json }); - // }); - // await page.goto("http://localhost:3000/"); - // await page.waitForURL("http://localhost:3000/login"); - // await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); - // await page.getByPlaceholder("Username").click(); - // await page.getByPlaceholder("Username").fill("test"); - // await page.getByPlaceholder("Password").click(); - // await page.getByPlaceholder("Password").fill("test"); - // await page.getByRole("button", { name: "Sign in" }).click(); - // await page.getByRole("button", { name: "Community Examples" }).click(); - // await page.waitForSelector(".community-pages-flows-panel"); - // expect( - // await page - // .locator(".community-pages-flows-panel") - 
// .evaluate((el) => el.children) - // ).toBeTruthy(); - // }); - // test("Login Error", async ({ page }) => { - // await page.route("**/api/v1/login", async (route) => { - // const json = { detail: "Incorrect username or password" }; - // await route.fulfill({ json, status: 401 }); - // }); - // await page.goto("http://localhost:3000/"); - // await page.waitForURL("http://localhost:3000/login"); - // await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); - // await page.getByPlaceholder("Username").click(); - // await page.getByPlaceholder("Username").fill("test"); - // await page.getByPlaceholder("Password").click(); - // await page.getByPlaceholder("Password").fill("test5"); - // await page.getByRole("button", { name: "Sign in" }).click(); - // await page.getByRole("heading", { name: "Error signing in" }).click(); - // }); - // test("Login create account wrong form", async ({ page }) => { - // const fullfillForm = async (username, password, confirmPassword) => { - // await page.getByPlaceholder("Username").click(); - // await page.getByPlaceholder("Username").fill(username); - // await page.getByPlaceholder("Password", { exact: true }).click(); - // await page.getByPlaceholder("Password", { exact: true }).fill(password); - // await page.getByPlaceholder("Confirm your password").click(); - // await page - // .getByPlaceholder("Confirm your password") - // .fill(confirmPassword); - // }; - // await page.goto("http://localhost:3000/"); - // await page.waitForURL("http://localhost:3000/login"); - // await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); - // await page - // .getByRole("button", { name: "Don't have an account? 
Sign Up" }) - // .click(); - // await page.getByText("Sign up to Langflow").click(); - // await page.goto("http://localhost:3000/signup"); - // await page.getByText("Sign up to Langflow").click(); - // await fullfillForm("name", "vazz", "vazz5"); - // expect( - // await page.getByRole("button", { name: "Sign up" }).isDisabled() - // ).toBeTruthy(); - // await fullfillForm("", "vazz", "vazz"); - // expect( - // await page.getByRole("button", { name: "Sign up" }).isDisabled() - // ).toBeTruthy(); - // await fullfillForm("name", "", ""); - // expect( - // await page.getByRole("button", { name: "Sign up" }).isDisabled() - // ).toBeTruthy(); - // await fullfillForm("", "", ""); - // expect( - // await page.getByRole("button", { name: "Sign up" }).isDisabled() - // ).toBeTruthy(); - // }); - // test("Login create account success", async ({ page }) => { - // await page.route("**/api/v1/users/", async (route) => { - // const json = { - // id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", - // username: "teste", - // profile_image: null, - // is_active: false, - // is_superuser: false, - // create_at: "2023-09-21T01:45:51.873303", - // updated_at: "2023-09-21T01:45:51.873305", - // last_login_at: null, - // }; - // await route.fulfill({ json, status: 201 }); - // }); - // const submitForm = async (username, password, confirmPassword) => { - // await page.getByPlaceholder("Username").click(); - // await page.getByPlaceholder("Username").fill(username); - // await page.getByPlaceholder("Password", { exact: true }).click(); - // await page.getByPlaceholder("Password", { exact: true }).fill(password); - // await page.getByPlaceholder("Confirm your password").click(); - // await page - // .getByPlaceholder("Confirm your password") - // .fill(confirmPassword); - // }; - // await page.goto("http://localhost:3000/"); - // await page.waitForURL("http://localhost:3000/login"); - // await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); - // await page - // 
.getByRole("button", { name: "Don't have an account? Sign Up" }) - // .click(); - // await page.getByText("Sign up to Langflow").click(); - // await page.goto("http://localhost:3000/signup"); - // await page.getByText("Sign up to Langflow").click(); - // await submitForm("teste", "pass", "pass"); - // await page.getByRole("button", { name: "Sign up" }).click(); - // await page.waitForURL("http://localhost:3000/login", { timeout: 1000 }); - // await page.getByText("Account created! Await admin activation.").click(); - }); -}); diff --git a/tests/conftest.py b/tests/conftest.py index af9bbe169..7f7252243 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,12 +10,14 @@ import orjson import pytest from fastapi.testclient import TestClient from httpx import AsyncClient -from sqlmodel import Session, SQLModel, create_engine +from sqlmodel import Session, SQLModel, create_engine, select from sqlmodel.pool import StaticPool from typer.testing import CliRunner from langflow.graph.graph.base import Graph +from langflow.initial_setup.setup import STARTER_FOLDER_NAME from langflow.services.auth.utils import get_password_hash +from langflow.services.database.models.api_key.model import ApiKey from langflow.services.database.models.flow.model import Flow, FlowCreate from langflow.services.database.models.user.model import User, UserCreate from langflow.services.database.utils import session_getter @@ -28,14 +30,17 @@ if TYPE_CHECKING: def pytest_configure(): pytest.BASIC_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "basic_example.json" pytest.COMPLEX_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "complex_example.json" + pytest.COMPLEX_DEPS_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "complex_deps_example.json" pytest.OPENAPI_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "Openapi.json" pytest.GROUPED_CHAT_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "grouped_chat.json" 
pytest.ONE_GROUPED_CHAT_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "one_group_chat.json" pytest.VECTOR_STORE_GROUPED_EXAMPLE_PATH = Path(__file__).parent.absolute() / "data" / "vector_store_grouped.json" pytest.BASIC_CHAT_WITH_PROMPT_AND_HISTORY = ( - Path(__file__).parent.absolute() / "data" / "BasicChatwithPromptandHistory.json" + Path(__file__).parent.absolute() / "data" / "BasicChatWithPromptAndHistory.json" ) + pytest.CHAT_INPUT = Path(__file__).parent.absolute() / "data" / "ChatInputTest.json" + pytest.TWO_OUTPUTS = Path(__file__).parent.absolute() / "data" / "TwoOutputsTest.json" pytest.VECTOR_STORE_PATH = Path(__file__).parent.absolute() / "data" / "Vector_store.json" pytest.CODE_WITH_SYNTAX_ERROR = """ def get_text(): @@ -43,6 +48,13 @@ def get_text(): """ +@pytest.fixture(autouse=True) +def check_openai_api_key_in_environment_variables(): + import os + + assert os.environ.get("OPENAI_API_KEY") is not None, "OPENAI_API_KEY is not set in environment variables" + + @pytest.fixture() async def async_client() -> AsyncGenerator: from langflow.main import create_app @@ -181,6 +193,16 @@ def json_vector_store(): return f.read() +@pytest.fixture +def complex_graph_with_groups(): + with open(pytest.COMPLEX_DEPS_EXAMPLE_PATH, "r") as f: + flow_graph = json.load(f) + data_graph = flow_graph["data"] + nodes = data_graph["nodes"] + edges = data_graph["edges"] + return Graph(nodes, edges) + + @pytest.fixture(name="client", autouse=True) def client_fixture(session: Session, monkeypatch): # Set the database url to a test database @@ -225,7 +247,7 @@ def test_user(client): username="testuser", password="testpassword", ) - response = client.post("/api/v1/users", json=user_data.model_dump()) + response = client.post("/api/v1/users", json=user_data.dict()) assert response.status_code == 201 return response.json() @@ -241,7 +263,7 @@ def active_user(client): is_superuser=False, ) # check if user exists - if active_user := 
session.query(User).filter(User.username == user.username).first(): + if active_user := session.exec(select(User).where(User.username == user.username)).first(): return active_user session.add(user) session.commit() @@ -264,13 +286,9 @@ def flow(client, json_flow: str, active_user): from langflow.services.database.models.flow.model import FlowCreate loaded_json = json.loads(json_flow) - flow_data = FlowCreate( - name="test_flow", - data=loaded_json.get("data"), - user_id=active_user.id, - description="description", - ) - flow = Flow.model_validate(flow_data.model_dump()) + flow_data = FlowCreate(name="test_flow", data=loaded_json.get("data"), user_id=active_user.id) + + flow = Flow.model_validate(flow_data) with session_getter(get_db_service()) as session: session.add(flow) session.commit() @@ -280,11 +298,47 @@ def flow(client, json_flow: str, active_user): @pytest.fixture -def added_flow(client, json_flow_with_prompt_and_history, logged_in_headers): +def json_chat_input(): + with open(pytest.CHAT_INPUT, "r") as f: + return f.read() + + +@pytest.fixture +def json_two_outputs(): + with open(pytest.TWO_OUTPUTS, "r") as f: + return f.read() + + +@pytest.fixture +def added_flow_with_prompt_and_history(client, json_flow_with_prompt_and_history, logged_in_headers): flow = orjson.loads(json_flow_with_prompt_and_history) data = flow["data"] flow = FlowCreate(name="Basic Chat", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) + assert response.status_code == 201 + assert response.json()["name"] == flow.name + assert response.json()["data"] == flow.data + return response.json() + + +@pytest.fixture +def added_flow_chat_input(client, json_chat_input, logged_in_headers): + flow = orjson.loads(json_chat_input) + data = flow["data"] + flow = FlowCreate(name="Chat Input", description="description", data=data) + 
response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) + assert response.status_code == 201 + assert response.json()["name"] == flow.name + assert response.json()["data"] == flow.data + return response.json() + + +@pytest.fixture +def added_flow_two_outputs(client, json_two_outputs, logged_in_headers): + flow = orjson.loads(json_two_outputs) + data = flow["data"] + flow = FlowCreate(name="Two Outputs", description="description", data=data) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data @@ -296,7 +350,7 @@ def added_vector_store(client, json_vector_store, logged_in_headers): vector_store = orjson.loads(json_vector_store) data = vector_store["data"] vector_store = FlowCreate(name="Vector Store", description="description", data=data) - response = client.post("api/v1/flows/", json=vector_store.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=vector_store.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == vector_store.name assert response.json()["data"] == vector_store.data @@ -304,16 +358,43 @@ def added_vector_store(client, json_vector_store, logged_in_headers): @pytest.fixture -def test_component_code(): - path = Path(__file__).parent.absolute() / "data" / "component.py" - # load the content as a string - with open(path, "r") as f: - return f.read() +def created_api_key(active_user): + hashed = get_password_hash("random_key") + api_key = ApiKey( + name="test_api_key", + user_id=active_user.id, + api_key="random_key", + hashed_api_key=hashed, + ) + db_manager = get_db_service() + with session_getter(db_manager) as session: + if existing_api_key := session.exec(select(ApiKey).where(ApiKey.api_key == api_key.api_key)).first(): + return existing_api_key + session.add(api_key) + 
session.commit() + session.refresh(api_key) + return api_key -@pytest.fixture -def test_component_with_templatefield_code(): - path = Path(__file__).parent.absolute() / "data" / "component_with_templatefield.py" - # load the content as a string - with open(path, "r") as f: - return f.read() +@pytest.fixture(name="starter_project") +def get_starter_project(active_user): + # once the client is created, we can get the starter project + with session_getter(get_db_service()) as session: + flow = session.exec( + select(Flow).where(Flow.folder == STARTER_FOLDER_NAME).where(Flow.name == "Basic Prompting (Hello, world!)") + ).first() + if not flow: + raise ValueError("No starter project found") + + new_flow_create = FlowCreate( + name=flow.name, + description=flow.description, + data=flow.data, + user_id=active_user.id, + ) + new_flow = Flow.model_validate(new_flow_create, from_attributes=True) + session.add(new_flow) + session.commit() + session.refresh(new_flow) + new_flow_dict = new_flow.model_dump() + return new_flow_dict diff --git a/tests/data/ChatInputTest.json b/tests/data/ChatInputTest.json new file mode 100644 index 000000000..3c516067e --- /dev/null +++ b/tests/data/ChatInputTest.json @@ -0,0 +1,918 @@ +{ + "name": "ChatInputTest", + "description": "", + "data": { + "nodes": [ + { + "width": 384, + "height": 359, + "id": "PromptTemplate-IKKOx", + "type": "genericNode", + "position": { + "x": 880, + "y": 646.9375 + }, + "data": { + "type": "PromptTemplate", + "node": { + "template": { + "output_parser": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "output_parser", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseOutputParser", + "list": false + }, + "input_variables": { + "required": true, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "input_variables", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + 
"list": true, + "value": [ + "input" + ] + }, + "partial_variables": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "partial_variables", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "template": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "password": false, + "name": "template", + "advanced": false, + "dynamic": false, + "info": "", + "type": "prompt", + "list": false, + "value": "Input: {input}\nAI:" + }, + "template_format": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": "f-string", + "password": false, + "name": "template_format", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "validate_template": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": true, + "password": false, + "name": "validate_template", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "PromptTemplate", + "input": { + "required": false, + "placeholder": "", + "show": true, + "multiline": true, + "value": "", + "password": false, + "name": "input", + "display_name": "input", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "str" + ], + "dynamic": false, + "info": "", + "type": "str", + "list": false + } + }, + "description": "A prompt template for a language model.", + "base_classes": [ + "BasePromptTemplate", + "PromptTemplate", + "StringPromptTemplate" + ], + "name": "", + "display_name": "PromptTemplate", + "documentation": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/", + "custom_fields": { + "": [ + "input" + ], + "template": [ + "input" + ] + }, + "output_types": [], + "field_formatters": { + "formatters": { + "openai_api_key": {} + }, + "base_formatters": { + "kwargs": {}, + "optional": 
{}, + "list": {}, + "dict": {}, + "union": {}, + "multiline": {}, + "show": {}, + "password": {}, + "default": {}, + "headers": {}, + "dict_code_file": {}, + "model_fields": { + "MODEL_DICT": { + "OpenAI": [ + "text-davinci-003", + "text-davinci-002", + "text-curie-001", + "text-babbage-001", + "text-ada-001" + ], + "ChatOpenAI": [ + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-16k", + "gpt-4-0613", + "gpt-4-32k-0613", + "gpt-4", + "gpt-4-32k" + ], + "Anthropic": [ + "claude-v1", + "claude-v1-100k", + "claude-instant-v1", + "claude-instant-v1-100k", + "claude-v1.3", + "claude-v1.3-100k", + "claude-v1.2", + "claude-v1.0", + "claude-instant-v1.1", + "claude-instant-v1.1-100k", + "claude-instant-v1.0" + ], + "ChatAnthropic": [ + "claude-v1", + "claude-v1-100k", + "claude-instant-v1", + "claude-instant-v1-100k", + "claude-v1.3", + "claude-v1.3-100k", + "claude-v1.2", + "claude-v1.0", + "claude-instant-v1.1", + "claude-instant-v1.1-100k", + "claude-instant-v1.0" + ] + } + } + } + }, + "beta": false, + "error": null + }, + "id": "PromptTemplate-IKKOx" + }, + "selected": false, + "positionAbsolute": { + "x": 880, + "y": 646.9375 + }, + "dragging": false + }, + { + "width": 384, + "height": 307, + "id": "LLMChain-e2dhN", + "type": "genericNode", + "position": { + "x": 1449.330344958542, + "y": 880.1760221487797 + }, + "data": { + "type": "LLMChain", + "node": { + "template": { + "callbacks": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "", + "type": "langchain.callbacks.base.BaseCallbackHandler", + "list": true + }, + "llm": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "llm", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseLanguageModel", + "list": false + }, + "memory": { + "required": false, + "placeholder": "", 
+ "show": true, + "multiline": false, + "password": false, + "name": "memory", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseMemory", + "list": false + }, + "output_parser": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "output_parser", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseLLMOutputParser", + "list": false + }, + "prompt": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "prompt", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BasePromptTemplate", + "list": false + }, + "llm_kwargs": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "llm_kwargs", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "metadata": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "output_key": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": "text", + "password": false, + "name": "output_key", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "return_final_only": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": true, + "password": false, + "name": "return_final_only", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "tags": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "verbose": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + 
"value": false, + "password": false, + "name": "verbose", + "advanced": true, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "LLMChain" + }, + "description": "Chain to run queries against LLMs.", + "base_classes": [ + "Chain", + "LLMChain", + "function", + "Text" + ], + "display_name": "LLMChain", + "custom_fields": {}, + "output_types": [], + "documentation": "https://python.langchain.com/docs/modules/chains/foundational/llm_chain", + "beta": false, + "error": null + }, + "id": "LLMChain-e2dhN" + }, + "positionAbsolute": { + "x": 1449.330344958542, + "y": 880.1760221487797 + } + }, + { + "width": 384, + "height": 621, + "id": "ChatOpenAI-2I57f", + "type": "genericNode", + "position": { + "x": 393.3551923753797, + "y": 1061.025177453298 + }, + "data": { + "type": "ChatOpenAI", + "node": { + "template": { + "callbacks": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "", + "type": "langchain.callbacks.base.BaseCallbackHandler", + "list": true + }, + "cache": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "cache", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "client": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "client", + "advanced": false, + "dynamic": false, + "info": "", + "type": "Any", + "list": false + }, + "max_retries": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": 6, + "password": false, + "name": "max_retries", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "max_tokens": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "max_tokens", + "advanced": 
false, + "dynamic": false, + "info": "", + "type": "int", + "list": false, + "value": "" + }, + "metadata": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "model_kwargs": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "model_kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "model_name": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo-0613", + "password": false, + "options": [ + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-16k", + "gpt-4-0613", + "gpt-4-32k-0613", + "gpt-4", + "gpt-4-32k" + ], + "name": "model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "n": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": 1, + "password": false, + "name": "n", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "openai_api_base": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n", + "type": "str", + "list": false + }, + "openai_api_key": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_organization": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_proxy": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "request_timeout": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "request_timeout", + "advanced": false, + "dynamic": false, + "info": "", + "type": "float", + "list": false + }, + "streaming": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "streaming", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "tags": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "temperature": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": 0.7, + "password": false, + "name": "temperature", + "advanced": false, + "dynamic": false, + "info": "", + "type": "float", + "list": false + 
}, + "tiktoken_model_name": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tiktoken_model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "verbose": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "verbose", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "ChatOpenAI" + }, + "description": "`OpenAI` Chat large language models API.", + "base_classes": [ + "BaseChatModel", + "ChatOpenAI", + "BaseLanguageModel", + "BaseLLM" + ], + "display_name": "ChatOpenAI", + "custom_fields": {}, + "output_types": [], + "documentation": "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai", + "beta": false, + "error": null + }, + "id": "ChatOpenAI-2I57f" + }, + "selected": false, + "positionAbsolute": { + "x": 393.3551923753797, + "y": 1061.025177453298 + }, + "dragging": false + }, + { + "width": 384, + "height": 359, + "id": "ChatInput-207IY", + "type": "genericNode", + "position": { + "x": 415.1018926651509, + "y": 506.62736462360317 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "dynamic": true, + "required": true, + "placeholder": "", + "show": false, + "multiline": true, + "value": "from typing import Optional\nfrom langflow.custom import CustomComponent\n\n\nclass ChatInput(CustomComponent):\n display_name = \"Chat Input\"\n\n def build(self, message: Optional[str] = \"\") -> str:\n return message\n", + "password": false, + "name": "code", + "advanced": false, + "type": "code", + "list": false + }, + "_type": "CustomComponent", + "message": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "", + "password": false, + "name": "message", + "display_name": "message", + "advanced": false, + "dynamic": false, + 
"info": "", + "type": "str", + "list": false + } + }, + "description": "Used to get user input from the chat.", + "base_classes": [ + "str" + ], + "display_name": "Chat Input", + "custom_fields": { + "message": null + }, + "output_types": [ + "ChatInput" + ], + "documentation": "", + "beta": true, + "error": null + }, + "id": "ChatInput-207IY" + }, + "positionAbsolute": { + "x": 415.1018926651509, + "y": 506.62736462360317 + } + }, + { + "width": 384, + "height": 389, + "id": "ChatOutput-1jlJy", + "type": "genericNode", + "position": { + "x": 2002.8008888732943, + "y": 926.1397178702218 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "dynamic": true, + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "value": "from typing import Optional, Text\nfrom langflow.api.v1.schemas import ChatMessage\nfrom langflow.services.utils import get_chat_manager\nfrom langflow.custom import CustomComponent\nfrom anyio.from_thread import start_blocking_portal\nfrom loguru import logger\n\n\nclass ChatOutput(CustomComponent):\n display_name = \"Chat Output\"\n description = \"Used to send a message to the chat.\"\n\n field_config = {\n \"code\": {\n \"show\": False,\n }\n }\n\n def build_config(self):\n return {\"message\": {\"input_types\": [\"Text\"]}}\n\n def build(self, message: Optional[Text], is_ai: bool = False) -> Text:\n if not message:\n return \"\"\n try:\n chat_manager = get_chat_manager()\n chat_message = ChatMessage(message=message, is_bot=is_ai)\n # send_message is a coroutine\n # run in a thread safe manner\n with start_blocking_portal() as portal:\n portal.call(chat_manager.send_message, chat_message)\n chat_manager.chat_history.add_message(\n chat_manager.cache_manager.current_client_id, chat_message\n )\n except Exception as exc:\n logger.exception(exc)\n logger.debug(f\"Error sending message to chat: {exc}\")\n self.repr_value = message\n return message\n", + "password": false, + "name": "code", + 
"advanced": false, + "type": "code", + "list": false + }, + "_type": "CustomComponent", + "is_ai": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": true, + "password": false, + "name": "is_ai", + "display_name": "is_ai", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "message": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "message", + "display_name": "message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "type": "Text", + "list": false + } + }, + "description": "Used to send a message to the chat.", + "base_classes": [ + "str" + ], + "display_name": "Chat Output", + "custom_fields": { + "is_ai": null, + "message": null + }, + "output_types": [ + "ChatOutput" + ], + "documentation": "", + "beta": true, + "error": null + }, + "id": "ChatOutput-1jlJy" + }, + "selected": true, + "dragging": false, + "positionAbsolute": { + "x": 2002.8008888732943, + "y": 926.1397178702218 + } + } + ], + "edges": [ + { + "source": "PromptTemplate-IKKOx", + "sourceHandle": "PromptTemplate|PromptTemplate-IKKOx|BasePromptTemplate|PromptTemplate|StringPromptTemplate", + "target": "LLMChain-e2dhN", + "targetHandle": "BasePromptTemplate|prompt|LLMChain-e2dhN", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-PromptTemplate-IKKOxPromptTemplate|PromptTemplate-IKKOx|StringPromptTemplate|BasePromptTemplate|PromptTemplate-LLMChain-e2dhNBasePromptTemplate|prompt|LLMChain-e2dhN" + }, + { + "source": "ChatOpenAI-2I57f", + "sourceHandle": "ChatOpenAI|ChatOpenAI-2I57f|BaseChatModel|ChatOpenAI|BaseLanguageModel|BaseLLM", + "target": "LLMChain-e2dhN", + "targetHandle": "BaseLanguageModel|llm|LLMChain-e2dhN", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": 
"reactflow__edge-ChatOpenAI-2I57fChatOpenAI|ChatOpenAI-2I57f|BaseChatModel|ChatOpenAI|BaseLanguageModel|BaseLLM-LLMChain-e2dhNBaseLanguageModel|llm|LLMChain-e2dhN" + }, + { + "source": "ChatInput-207IY", + "sourceHandle": "ChatInput|ChatInput-207IY|str", + "target": "PromptTemplate-IKKOx", + "targetHandle": "Document;BaseOutputParser;str|input|PromptTemplate-IKKOx", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-ChatInput-207IYChatInput|ChatInput-207IY|str-PromptTemplate-IKKOxDocument;BaseOutputParser;str|input|PromptTemplate-IKKOx" + }, + { + "source": "LLMChain-e2dhN", + "sourceHandle": "LLMChain|LLMChain-e2dhN|Chain|LLMChain|function|Text", + "target": "ChatOutput-1jlJy", + "targetHandle": "Text|message|ChatOutput-1jlJy", + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "animated": true, + "id": "reactflow__edge-LLMChain-e2dhNLLMChain|LLMChain-e2dhN|Chain|LLMChain|function|Text-ChatOutput-1jlJyText|message|ChatOutput-1jlJy" + } + ], + "viewport": { + "x": -141.98308184453367, + "y": -104.98637616656356, + "zoom": 0.4788209787464315 + } + }, + "id": "b3388ab9-b5dc-4447-b560-79caef40faa5", + "user_id": "c65bfea3-3eea-4e71-8fc4-106238eb0583" +} \ No newline at end of file diff --git a/tests/data/TwoOutputsTest.json b/tests/data/TwoOutputsTest.json new file mode 100644 index 000000000..4cd060249 --- /dev/null +++ b/tests/data/TwoOutputsTest.json @@ -0,0 +1,1024 @@ +{ + "name": "TwoOutputsTest", + "description": "", + "data": { + "nodes": [ + { + "width": 384, + "height": 359, + "id": "PromptTemplate-CweKz", + "type": "genericNode", + "position": { + "x": 969.6448076246203, + "y": 528.7788853763968 + }, + "data": { + "type": "PromptTemplate", + "node": { + "template": { + "output_parser": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "output_parser", + "advanced": false, + "dynamic": false, + "info": 
"", + "type": "BaseOutputParser", + "list": false + }, + "input_variables": { + "required": true, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "input_variables", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true, + "value": [ + "input" + ] + }, + "partial_variables": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "partial_variables", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "template": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "password": false, + "name": "template", + "advanced": false, + "dynamic": false, + "info": "", + "type": "prompt", + "list": false, + "value": "Input: {input}\nAI:" + }, + "template_format": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": "f-string", + "password": false, + "name": "template_format", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "validate_template": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": true, + "password": false, + "name": "validate_template", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "PromptTemplate", + "input": { + "required": false, + "placeholder": "", + "show": true, + "multiline": true, + "value": "", + "password": false, + "name": "input", + "display_name": "input", + "advanced": false, + "input_types": [ + "Document", + "BaseOutputParser", + "str" + ], + "dynamic": false, + "info": "", + "type": "str", + "list": false + } + }, + "description": "A prompt template for a language model.", + "base_classes": [ + "BasePromptTemplate", + "StringPromptTemplate", + "PromptTemplate" + ], + "name": "", + "display_name": "PromptTemplate", + "documentation": 
"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/", + "custom_fields": { + "": [ + "input" + ], + "template": [ + "input" + ] + }, + "output_types": [], + "field_formatters": { + "formatters": { + "openai_api_key": {} + }, + "base_formatters": { + "kwargs": {}, + "optional": {}, + "list": {}, + "dict": {}, + "union": {}, + "multiline": {}, + "show": {}, + "password": {}, + "default": {}, + "headers": {}, + "dict_code_file": {}, + "model_fields": { + "MODEL_DICT": { + "OpenAI": [ + "text-davinci-003", + "text-davinci-002", + "text-curie-001", + "text-babbage-001", + "text-ada-001" + ], + "ChatOpenAI": [ + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-16k", + "gpt-4-0613", + "gpt-4-32k-0613", + "gpt-4", + "gpt-4-32k" + ], + "Anthropic": [ + "claude-v1", + "claude-v1-100k", + "claude-instant-v1", + "claude-instant-v1-100k", + "claude-v1.3", + "claude-v1.3-100k", + "claude-v1.2", + "claude-v1.0", + "claude-instant-v1.1", + "claude-instant-v1.1-100k", + "claude-instant-v1.0" + ], + "ChatAnthropic": [ + "claude-v1", + "claude-v1-100k", + "claude-instant-v1", + "claude-instant-v1-100k", + "claude-v1.3", + "claude-v1.3-100k", + "claude-v1.2", + "claude-v1.0", + "claude-instant-v1.1", + "claude-instant-v1.1-100k", + "claude-instant-v1.0" + ] + } + } + } + }, + "beta": false, + "error": null + }, + "id": "PromptTemplate-CweKz" + }, + "selected": false, + "positionAbsolute": { + "x": 969.6448076246203, + "y": 528.7788853763968 + } + }, + { + "width": 384, + "height": 307, + "id": "LLMChain-HUM6g", + "type": "genericNode", + "position": { + "x": 1515.3241458756393, + "y": 732.4536491407735 + }, + "data": { + "type": "LLMChain", + "node": { + "template": { + "callbacks": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "", + "type": "langchain.callbacks.base.BaseCallbackHandler", + 
"list": true + }, + "llm": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "llm", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseLanguageModel", + "list": false + }, + "memory": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "memory", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseMemory", + "list": false + }, + "output_parser": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "output_parser", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseLLMOutputParser", + "list": false + }, + "prompt": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "prompt", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BasePromptTemplate", + "list": false + }, + "llm_kwargs": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "llm_kwargs", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "metadata": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "output_key": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": "text", + "password": false, + "name": "output_key", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "return_final_only": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": true, + "password": false, + "name": "return_final_only", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "tags": { + 
"required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "verbose": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "verbose", + "advanced": true, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "LLMChain" + }, + "description": "Chain to run queries against LLMs.", + "base_classes": [ + "LLMChain", + "Chain", + "function", + "Text" + ], + "display_name": "LLMChain", + "custom_fields": {}, + "output_types": [], + "documentation": "https://python.langchain.com/docs/modules/chains/foundational/llm_chain", + "beta": false, + "error": null + }, + "id": "LLMChain-HUM6g" + }, + "selected": false, + "positionAbsolute": { + "x": 1515.3241458756393, + "y": 732.4536491407735 + }, + "dragging": false + }, + { + "width": 384, + "height": 621, + "id": "ChatOpenAI-02kOF", + "type": "genericNode", + "position": { + "x": 483, + "y": 942.8665628296949 + }, + "data": { + "type": "ChatOpenAI", + "node": { + "template": { + "callbacks": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "", + "type": "langchain.callbacks.base.BaseCallbackHandler", + "list": true + }, + "cache": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "cache", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "client": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "client", + "advanced": false, + "dynamic": false, + "info": "", + "type": "Any", + "list": false + }, + "max_retries": { + "required": false, + "placeholder": "", + "show": false, 
+ "multiline": false, + "value": 6, + "password": false, + "name": "max_retries", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "max_tokens": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "max_tokens", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false, + "value": "" + }, + "metadata": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "model_kwargs": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "model_kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "model_name": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo-0613", + "password": false, + "options": [ + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-16k", + "gpt-4-0613", + "gpt-4-32k-0613", + "gpt-4", + "gpt-4-32k" + ], + "name": "model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "n": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": 1, + "password": false, + "name": "n", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "openai_api_base": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "\nThe base URL of the OpenAI API. 
Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n", + "type": "str", + "list": false + }, + "openai_api_key": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_organization": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_proxy": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "request_timeout": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "request_timeout", + "advanced": false, + "dynamic": false, + "info": "", + "type": "float", + "list": false + }, + "streaming": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "streaming", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "tags": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "temperature": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": 0.7, + "password": false, + "name": "temperature", + "advanced": false, + "dynamic": false, + "info": "", + "type": "float", + "list": false + 
}, + "tiktoken_model_name": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tiktoken_model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "verbose": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "verbose", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "ChatOpenAI" + }, + "description": "`OpenAI` Chat large language models API.", + "base_classes": [ + "ChatOpenAI", + "BaseLanguageModel", + "BaseChatModel", + "BaseLLM" + ], + "display_name": "ChatOpenAI", + "custom_fields": {}, + "output_types": [], + "documentation": "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai", + "beta": false, + "error": null + }, + "id": "ChatOpenAI-02kOF" + }, + "selected": false, + "positionAbsolute": { + "x": 483, + "y": 942.8665628296949 + } + }, + { + "width": 384, + "height": 389, + "id": "ChatOutput-8SWFf", + "type": "genericNode", + "position": { + "x": 2035.5749798606498, + "y": 651.0174452514373 + }, + "data": { + "type": "ChatOutput", + "node": { + "template": { + "code": { + "dynamic": true, + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "value": "from typing import Optional\nfrom langflow.api.v1.schemas import ChatMessage\nfrom langflow.services.utils import get_chat_manager\nfrom langflow.custom import CustomComponent\nfrom anyio.from_thread import start_blocking_portal\nfrom loguru import logger\nfrom langflow.field_typing import Text\n\n\nclass ChatOutput(CustomComponent):\n display_name = \"Chat Output\"\n\n def build_config(self):\n return {\"message\": {\"input_types\": [\"str\"]}}\n\n def build(self, message: Optional[Text], is_ai: bool = False) -> Text:\n if not message:\n return \"\"\n try:\n chat_manager = get_chat_manager()\n chat_message 
= ChatMessage(message=message, is_bot=is_ai)\n # send_message is a coroutine\n # run in a thread safe manner\n with start_blocking_portal() as portal:\n portal.call(chat_manager.send_message, chat_message)\n chat_manager.chat_history.add_message(\n chat_manager.cache_manager.current_client_id, chat_message\n )\n except Exception as exc:\n logger.exception(exc)\n logger.debug(f\"Error sending message to chat: {exc}\")\n\n return message\n", + "password": false, + "name": "code", + "advanced": false, + "type": "code", + "list": false + }, + "_type": "CustomComponent", + "is_ai": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": false, + "password": false, + "name": "is_ai", + "display_name": "is_ai", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "message": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "message", + "display_name": "message", + "advanced": false, + "input_types": [ + "Text" + ], + "dynamic": false, + "info": "", + "type": "Text", + "list": false + } + }, + "description": "Used to send a message to the chat.", + "base_classes": [ + "str" + ], + "display_name": "Chat Output", + "custom_fields": { + "is_ai": null, + "message": null + }, + "output_types": [ + "ChatOutput" + ], + "documentation": "", + "beta": true, + "error": null + }, + "id": "ChatOutput-8SWFf" + }, + "selected": false, + "positionAbsolute": { + "x": 2035.5749798606498, + "y": 651.0174452514373 + } + }, + { + "width": 384, + "height": 273, + "id": "ChatInput-PqtHe", + "type": "genericNode", + "position": { + "x": 504.7467002897712, + "y": 388.46875 + }, + "data": { + "type": "ChatInput", + "node": { + "template": { + "code": { + "dynamic": true, + "required": true, + "placeholder": "", + "show": false, + "multiline": true, + "value": "from typing import Optional\nfrom langflow.custom import CustomComponent\n\n\nclass 
ChatInput(CustomComponent):\n display_name = \"Chat Input\"\n\n def build(self, message: Optional[str] = \"\") -> str:\n return message\n", + "password": false, + "name": "code", + "advanced": false, + "type": "code", + "list": false + }, + "_type": "CustomComponent", + "message": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "", + "password": false, + "name": "message", + "display_name": "message", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + } + }, + "description": "Used to get user input from the chat.", + "base_classes": [ + "str" + ], + "display_name": "Chat Input", + "custom_fields": { + "message": null + }, + "output_types": [ + "ChatInput" + ], + "documentation": "", + "beta": true, + "error": null + }, + "id": "ChatInput-PqtHe" + }, + "selected": false, + "positionAbsolute": { + "x": 504.7467002897712, + "y": 388.46875 + } + }, + { + "width": 384, + "height": 475, + "id": "Tool-jyI4N", + "type": "genericNode", + "position": { + "x": 2044.485030617051, + "y": 1131.4250055845532 + }, + "data": { + "type": "Tool", + "node": { + "template": { + "func": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "password": false, + "name": "func", + "advanced": false, + "dynamic": false, + "info": "", + "type": "function", + "list": false + }, + "description": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "value": "Test tool", + "password": false, + "name": "description", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "name": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "value": "Tool", + "password": false, + "name": "name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "return_direct": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": 
false, + "password": false, + "name": "return_direct", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "Tool" + }, + "description": "Converts a chain, agent or function into a tool.", + "base_classes": [ + "Tool", + "BaseTool" + ], + "display_name": "Tool", + "custom_fields": {}, + "output_types": [], + "documentation": "", + "beta": false, + "error": null + }, + "id": "Tool-jyI4N" + }, + "selected": true, + "positionAbsolute": { + "x": 2044.485030617051, + "y": 1131.4250055845532 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "PromptTemplate-CweKz", + "target": "LLMChain-HUM6g", + "sourceHandle": "PromptTemplate|PromptTemplate-CweKz|BasePromptTemplate|StringPromptTemplate|PromptTemplate", + "targetHandle": "BasePromptTemplate|prompt|LLMChain-HUM6g", + "id": "reactflow__edge-PromptTemplate-CweKzPromptTemplate|PromptTemplate-CweKz|BasePromptTemplate|StringPromptTemplate|PromptTemplate-LLMChain-HUM6gBasePromptTemplate|prompt|LLMChain-HUM6g", + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 ", + "animated": false, + "selected": false + }, + { + "source": "ChatOpenAI-02kOF", + "target": "LLMChain-HUM6g", + "sourceHandle": "ChatOpenAI|ChatOpenAI-02kOF|ChatOpenAI|BaseLanguageModel|BaseChatModel|BaseLLM", + "targetHandle": "BaseLanguageModel|llm|LLMChain-HUM6g", + "id": "reactflow__edge-ChatOpenAI-02kOFChatOpenAI|ChatOpenAI-02kOF|ChatOpenAI|BaseLanguageModel|BaseChatModel|BaseLLM-LLMChain-HUM6gBaseLanguageModel|llm|LLMChain-HUM6g", + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 ", + "animated": false, + "selected": false + }, + { + "source": "ChatInput-PqtHe", + "target": "PromptTemplate-CweKz", + "sourceHandle": "ChatInput|ChatInput-PqtHe|str", + "targetHandle": "Document;BaseOutputParser;str|input|PromptTemplate-CweKz", + "id": 
"reactflow__edge-ChatInput-PqtHeChatInput|ChatInput-PqtHe|str-PromptTemplate-CweKzDocument;BaseOutputParser;str|input|PromptTemplate-CweKz", + "style": { + "stroke": "#555" + }, + "className": "stroke-gray-900 ", + "animated": false, + "selected": false + }, + { + "source": "LLMChain-HUM6g", + "sourceHandle": "LLMChain|LLMChain-HUM6g|LLMChain|Chain|function|Text", + "target": "ChatOutput-8SWFf", + "targetHandle": "Text|message|ChatOutput-8SWFf", + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "animated": true, + "id": "reactflow__edge-LLMChain-HUM6gLLMChain|LLMChain-HUM6g|LLMChain|Chain|function|Text-ChatOutput-8SWFfText|message|ChatOutput-8SWFf" + }, + { + "source": "LLMChain-HUM6g", + "sourceHandle": "LLMChain|LLMChain-HUM6g|LLMChain|Chain|function|Text", + "target": "Tool-jyI4N", + "targetHandle": "function|func|Tool-jyI4N", + "style": { + "stroke": "#555" + }, + "className": "stroke-foreground stroke-connection", + "animated": false, + "id": "reactflow__edge-LLMChain-HUM6gLLMChain|LLMChain-HUM6g|LLMChain|Chain|function|Text-Tool-jyI4Nfunction|func|Tool-jyI4N" + } + ], + "viewport": { + "x": -401.32668426335044, + "y": -129.59138346130635, + "zoom": 0.5073779796520557 + } + }, + "id": "cf923ccb-e14c-4754-96eb-a8a3b5bbe082", + "user_id": "c65bfea3-3eea-4e71-8fc4-106238eb0583" +} \ No newline at end of file diff --git a/tests/data/component.py b/tests/data/component.py index e801d2feb..5de63fcd5 100644 --- a/tests/data/component.py +++ b/tests/data/component.py @@ -1,6 +1,6 @@ import random -from langflow import CustomComponent +from langflow.custom import CustomComponent class TestComponent(CustomComponent): diff --git a/tests/data/component_with_templatefield.py b/tests/data/component_with_templatefield.py index 6b2ce011b..26d7e834b 100644 --- a/tests/data/component_with_templatefield.py +++ b/tests/data/component_with_templatefield.py @@ -1,6 +1,6 @@ import random -from langflow import CustomComponent +from 
langflow.custom import CustomComponent from langflow.field_typing import TemplateField diff --git a/tests/test_api_key.py b/tests/test_api_key.py index 7988793d4..31f484d60 100644 --- a/tests/test_api_key.py +++ b/tests/test_api_key.py @@ -1,4 +1,5 @@ import pytest + from langflow.services.database.models.api_key import ApiKeyCreate @@ -21,7 +22,6 @@ def test_get_api_keys(client, logged_in_headers, api_key): assert any("test-api-key" in api_key["name"] for api_key in data["api_keys"]) # assert all api keys in data["api_keys"] are masked assert all("**" in api_key["api_key"] for api_key in data["api_keys"]) - # Add more assertions as needed based on the expected data structure and content def test_create_api_key(client, logged_in_headers): diff --git a/tests/test_cache.py b/tests/test_cache.py index 8cfebe230..6d4911efd 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -39,7 +39,8 @@ def langchain_objects_are_equal(obj1, obj2): # Test build_graph -def test_build_graph(client, basic_data_graph): +@pytest.mark.asyncio +async def test_build_graph(client, basic_data_graph): graph = Graph.from_payload(basic_data_graph) assert graph is not None assert len(graph.vertices) == len(basic_data_graph["nodes"]) diff --git a/tests/test_cli.py b/tests/test_cli.py index ee95a271c..60595f48a 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -17,6 +17,7 @@ def default_settings(): def test_components_path(runner, client, default_settings): # Create a foldr in the tmp directory + temp_dir = Path(tempdir) # create a "components" folder temp_dir = temp_dir / "components" @@ -34,5 +35,4 @@ def test_components_path(runner, client, default_settings): def test_superuser(runner, client, session): result = runner.invoke(app, ["superuser"], input="admin\nadmin\n") assert result.exit_code == 0, result.stdout - assert "Superuser creation failed." not in result.output, result.output - assert "Superuser created successfully." 
in result.output, result.output + assert "Superuser created successfully." in result.stdout diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index 1b85c9198..725d35564 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -3,14 +3,21 @@ import types from uuid import uuid4 import pytest +from langchain_core.documents import Document + from langflow.interface.custom.base import CustomComponent -from langflow.interface.custom.code_parser.code_parser import CodeParser, CodeSyntaxError -from langflow.interface.custom.custom_component.component import Component, ComponentCodeNullError -from langflow.interface.custom.utils import build_custom_component_template +from langflow.interface.custom.code_parser.code_parser import ( + CodeParser, + CodeSyntaxError, +) +from langflow.interface.custom.custom_component.component import ( + Component, + ComponentCodeNullError, +) from langflow.services.database.models.flow import Flow, FlowCreate code_default = """ -from langflow import Prompt +from langflow.field_typing import Prompt from langflow.interface.custom.custom_component import CustomComponent from langchain.llms.base import BaseLLM @@ -67,16 +74,16 @@ def test_component_init(): """ Test the initialization of the Component class. """ - component = Component(code=code_default, _function_entrypoint_name="build") + component = Component(code=code_default, function_entrypoint_name="build") assert component.code == code_default - assert component._function_entrypoint_name == "build" + assert component.function_entrypoint_name == "build" def test_component_get_code_tree(): """ Test the get_code_tree method of the Component class. 
""" - component = Component(code=code_default, _function_entrypoint_name="build") + component = Component(code=code_default, function_entrypoint_name="build") tree = component.get_code_tree(component.code) assert "imports" in tree @@ -86,22 +93,11 @@ def test_component_code_null_error(): Test the get_function method raises the ComponentCodeNullError when the code is empty. """ - component = Component(code="", _function_entrypoint_name="") + component = Component(code="", function_entrypoint_name="") with pytest.raises(ComponentCodeNullError): component.get_function() -# TODO: Validate if we should remove this -# def test_component_function_entrypoint_name_null_error(): -# """ -# Test the get_function method raises the ComponentFunctionEntrypointNameNullError -# when the function_entrypoint_name is empty. -# """ -# component = Component(code=code_default, _function_entrypoint_name="") -# with pytest.raises(ComponentFunctionEntrypointNameNullError): -# component.get_function() - - def test_custom_component_init(): """ Test the initialization of the CustomComponent class. @@ -118,7 +114,7 @@ def test_custom_component_build_template_config(): Test the build_template_config property of the CustomComponent class. """ custom_component = CustomComponent(code=code_default, function_entrypoint_name="build") - config = custom_component.template_config + config = custom_component.build_template_config() assert isinstance(config, dict) @@ -127,7 +123,7 @@ def test_custom_component_get_function(): Test the get_function property of the CustomComponent class. """ custom_component = CustomComponent(code="def build(): pass", function_entrypoint_name="build") - my_function = custom_component.get_function + my_function = custom_component.get_function() assert isinstance(my_function, types.FunctionType) @@ -201,7 +197,7 @@ def test_component_get_function_valid(): Test the get_function method of the Component class with valid code and function_entrypoint_name. 
""" - component = Component(code="def build(): pass", _function_entrypoint_name="build") + component = Component(code="def build(): pass", function_entrypoint_name="build") my_function = component.get_function() assert callable(my_function) @@ -224,7 +220,6 @@ def test_custom_component_get_function_entrypoint_return_type(): Test the get_function_entrypoint_return_type property of the CustomComponent class. """ - from langchain_core.documents import Document custom_component = CustomComponent(code=code_default, function_entrypoint_name="build") return_type = custom_component.get_function_entrypoint_return_type @@ -309,7 +304,7 @@ def test_code_parser_parse_ann_assign(): stmt = ast.AnnAssign( target=ast.Name(id="x", ctx=ast.Store()), annotation=ast.Name(id="int", ctx=ast.Load()), - value=ast.Constant(n=1), + value=ast.Num(n=1), simple=1, ) result = parser.parse_ann_assign(stmt) @@ -359,11 +354,21 @@ def test_component_get_code_tree_syntax_error(): Test the get_code_tree method of the Component class raises the CodeSyntaxError when given incorrect syntax. """ - component = Component(code="import os as", _function_entrypoint_name="build") + component = Component(code="import os as", function_entrypoint_name="build") with pytest.raises(CodeSyntaxError): component.get_code_tree(component.code) +def test_custom_component_class_template_validation_no_code(): + """ + Test the _class_template_validation method of the CustomComponent class + raises the HTTPException when the code is None. 
+ """ + custom_component = CustomComponent(code=None, function_entrypoint_name="build") + with pytest.raises(TypeError): + custom_component.get_function() + + def test_custom_component_get_code_tree_syntax_error(): """ Test the get_code_tree method of the CustomComponent class @@ -518,36 +523,3 @@ def test_build_config_field_value_keys(component): config = component.build_config() field_values = config["fields"].values() assert all("type" in value for value in field_values) - - -def test_create_and_validate_component_valid_code(test_component_code): - component = CustomComponent(code=test_component_code) - assert isinstance(component, CustomComponent) - - -def test_build_langchain_template_custom_component_valid_code(test_component_code): - component = CustomComponent(code=test_component_code) - frontend_node = build_custom_component_template(component) - assert isinstance(frontend_node, dict) - template = frontend_node["template"] - assert isinstance(template, dict) - assert "param" in template - param_options = template["param"]["options"] - # Now run it again with an update field - frontend_node = build_custom_component_template(component, update_field="param") - new_param_options = frontend_node["template"]["param"]["options"] - assert param_options != new_param_options - - -def test_build_langchain_template_custom_component_templatefield(test_component_with_templatefield_code): - component = CustomComponent(code=test_component_with_templatefield_code) - frontend_node = build_custom_component_template(component) - assert isinstance(frontend_node, dict) - template = frontend_node["template"] - assert isinstance(template, dict) - assert "param" in template - param_options = template["param"]["options"] - # Now run it again with an update field - frontend_node = build_custom_component_template(component, update_field="param") - new_param_options = frontend_node["template"]["param"]["options"] - assert param_options != new_param_options diff --git 
a/tests/test_custom_types.py b/tests/test_custom_types.py index ba54b7023..33c854ba3 100644 --- a/tests/test_custom_types.py +++ b/tests/test_custom_types.py @@ -1,7 +1,7 @@ # Test this: -from langflow.interface.importing.utils import get_function import pytest -from langflow.interface.tools.custom import PythonFunctionTool, PythonFunction +from langflow.interface.custom.utils import get_function +from langflow.interface.tools.custom import PythonFunction, PythonFunctionTool from langflow.utils import constants diff --git a/tests/test_data_components.py b/tests/test_data_components.py new file mode 100644 index 000000000..ca92ee190 --- /dev/null +++ b/tests/test_data_components.py @@ -0,0 +1,166 @@ +import os +from pathlib import Path +from unittest.mock import Mock, patch + +import httpx +import pytest +import respx +from httpx import Response + +from langflow.components import ( + data, +) # Adjust the import according to your project structure + + +@pytest.fixture +def api_request(): + # This fixture provides an instance of APIRequest for each test case + return data.APIRequest() + + +@pytest.mark.asyncio +@respx.mock +async def test_successful_get_request(api_request): + # Mocking a successful GET request + url = "https://example.com/api/test" + method = "GET" + mock_response = {"success": True} + respx.get(url).mock(return_value=Response(200, json=mock_response)) + + # Making the request + result = await api_request.make_request(client=httpx.AsyncClient(), method=method, url=url) + + # Assertions + assert result.data["status_code"] == 200 + assert result.data["result"] == mock_response + + +@pytest.mark.asyncio +@respx.mock +async def test_failed_request(api_request): + # Mocking a failed GET request + url = "https://example.com/api/test" + method = "GET" + respx.get(url).mock(return_value=Response(404)) + + # Making the request + result = await api_request.make_request(client=httpx.AsyncClient(), method=method, url=url) + + # Assertions + assert 
result.data["status_code"] == 404 + + +@pytest.mark.asyncio +@respx.mock +async def test_timeout(api_request): + # Mocking a timeout + url = "https://example.com/api/timeout" + method = "GET" + respx.get(url).mock(side_effect=httpx.TimeoutException(message="Timeout", request=None)) + + # Making the request + result = await api_request.make_request(client=httpx.AsyncClient(), method=method, url=url, timeout=1) + + # Assertions + assert result.data["status_code"] == 408 + assert result.data["error"] == "Request timed out" + + +@pytest.mark.asyncio +@respx.mock +async def test_build_with_multiple_urls(api_request): + # This test depends on having a working internet connection and accessible URLs + # It's better to mock these requests using respx or a similar library + + # Setup for multiple URLs + method = "GET" + urls = ["https://example.com/api/one", "https://example.com/api/two"] + # You would mock these requests similarly to the single request tests + for url in urls: + respx.get(url).mock(return_value=Response(200, json={"success": True})) + + # Do I have to mock the async client? 
+ # + + # Execute the build method + results = await api_request.build(method=method, urls=urls) + + # Assertions + assert len(results) == len(urls) + + +@patch("langflow.components.data.Directory.parallel_load_records") +@patch("langflow.components.data.Directory.retrieve_file_paths") +@patch("langflow.components.data.DirectoryComponent.resolve_path") +def test_directory_component_build_with_multithreading( + mock_resolve_path, mock_retrieve_file_paths, mock_parallel_load_records +): + # Arrange + directory_component = data.DirectoryComponent() + path = os.path.dirname(os.path.abspath(__file__)) + depth = 1 + max_concurrency = 2 + load_hidden = False + recursive = True + silent_errors = False + use_multithreading = True + + mock_resolve_path.return_value = path + mock_retrieve_file_paths.return_value = [ + os.path.join(path, file) for file in os.listdir(path) if file.endswith(".py") + ] + mock_parallel_load_records.return_value = [Mock()] + + # Act + directory_component.build( + path, + depth, + max_concurrency, + load_hidden, + recursive, + silent_errors, + use_multithreading, + ) + + # Assert + mock_resolve_path.assert_called_once_with(path) + mock_retrieve_file_paths.assert_called_once_with(path, load_hidden, recursive, depth) + mock_parallel_load_records.assert_called_once_with( + mock_retrieve_file_paths.return_value, silent_errors, max_concurrency + ) + + +def test_directory_without_mocks(): + directory_component = data.DirectoryComponent() + from langflow.initial_setup import setup + from langflow.initial_setup.setup import load_starter_projects + + projects = load_starter_projects() + # the setup module has a folder where the projects are stored + # the contents of that folder are in the projects variable + # the directory component can be used to load the projects + # and we can validate if the contents are the same as the projects variable + setup_path = Path(setup.__file__).parent / "starter_projects" + results = 
directory_component.build(str(setup_path), use_multithreading=False) + assert len(results) == len(projects) + # each result is a Record that contains the content attribute + # each are dict that are exactly the same as one of the projects + for result in results: + assert result.text in projects + + # in ../docs/docs/components there are many mdx files + # check if the directory component can load them + # just check if the number of results is the same as the number of files + docs_path = Path(__file__).parent.parent / "docs" / "docs" / "components" + results = directory_component.build(str(docs_path), use_multithreading=False) + docs_files = list(docs_path.glob("*.mdx")) + assert len(results) == len(docs_files) + + +def test_url_component(): + url_component = data.URLComponent() + # the url component can be used to load the contents of a website + records = url_component.build(["https://langflow.org"]) + assert all(record.data for record in records) + assert all(record.text for record in records) + assert all(record.source for record in records) diff --git a/tests/test_database.py b/tests/test_database.py index f52252856..554a1fc4f 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -3,12 +3,14 @@ from uuid import UUID, uuid4 import orjson import pytest from fastapi.testclient import TestClient +from sqlmodel import Session + from langflow.api.v1.schemas import FlowListCreate +from langflow.initial_setup.setup import load_starter_projects from langflow.services.database.models.base import orjson_dumps from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate from langflow.services.database.utils import session_getter from langflow.services.deps import get_db_service -from sqlmodel import Session @pytest.fixture(scope="module") @@ -29,13 +31,13 @@ def test_create_flow(client: TestClient, json_flow: str, active_user, logged_in_ flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", 
description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data # flow is optional so we can create a flow without a flow - flow = FlowCreate(name="Test Flow", description="description") - response = client.post("api/v1/flows/", json=flow.model_dump(exclude_unset=True), headers=logged_in_headers) + flow = FlowCreate(name="Test Flow") + response = client.post("api/v1/flows/", json=flow.dict(exclude_unset=True), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data @@ -45,13 +47,13 @@ def test_read_flows(client: TestClient, json_flow: str, active_user, logged_in_h flow_data = orjson.loads(json_flow) data = flow_data["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data @@ -65,7 +67,7 @@ def test_read_flow(client: TestClient, json_flow: str, active_user, logged_in_he flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = 
client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) flow_id = response.json()["id"] # flow_id should be a UUID but is a string # turn it into a UUID flow_id = UUID(flow_id) @@ -81,7 +83,7 @@ def test_update_flow(client: TestClient, json_flow: str, active_user, logged_in_ data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) flow_id = response.json()["id"] updated_flow = FlowUpdate( @@ -89,7 +91,7 @@ def test_update_flow(client: TestClient, json_flow: str, active_user, logged_in_ description="updated description", data=data, ) - response = client.patch(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) + response = client.patch(f"api/v1/flows/{flow_id}", json=updated_flow.dict(), headers=logged_in_headers) assert response.status_code == 200 assert response.json()["name"] == updated_flow.name @@ -101,7 +103,7 @@ def test_delete_flow(client: TestClient, json_flow: str, active_user, logged_in_ flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) flow_id = response.json()["id"] response = client.delete(f"api/v1/flows/{flow_id}", headers=logged_in_headers) assert response.status_code == 200 @@ -119,7 +121,7 @@ def test_create_flows(client: TestClient, session: Session, json_flow: str, logg ] ) # Make request to endpoint - response = client.post("api/v1/flows/batch/", json=flow_list.model_dump(), headers=logged_in_headers) + response = 
client.post("api/v1/flows/batch/", json=flow_list.dict(), headers=logged_in_headers) # Check response status code assert response.status_code == 201 # Check response data @@ -143,7 +145,7 @@ def test_upload_file(client: TestClient, session: Session, json_flow: str, logge FlowCreate(name="Flow 2", description="description", data=data), ] ) - file_contents = orjson_dumps(flow_list.model_dump()) + file_contents = orjson_dumps(flow_list.dict()) response = client.post( "api/v1/flows/upload/", files={"file": ("examples.json", file_contents, "application/json")}, @@ -191,7 +193,9 @@ def test_download_file( assert response.status_code == 200, response.json() # Check response data response_data = response.json()["flows"] - assert len(response_data) == 2, response_data + starter_projects = load_starter_projects() + number_of_projects = len(starter_projects) + len(flow_list.flows) + assert len(response_data) == number_of_projects, response_data assert response_data[0]["name"] == "Flow 1" assert response_data[0]["description"] == "description" assert response_data[0]["data"] == data @@ -216,11 +220,11 @@ def test_update_flow_idempotency(client: TestClient, json_flow: str, active_user flow_data = orjson.loads(json_flow) data = flow_data["data"] flow_data = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow_data.model_dump(), headers=logged_in_headers) + response = client.post("api/v1/flows/", json=flow_data.dict(), headers=logged_in_headers) flow_id = response.json()["id"] updated_flow = FlowCreate(name="Updated Flow", description="description", data=data) - response1 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) - response2 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.model_dump(), headers=logged_in_headers) + response1 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.dict(), headers=logged_in_headers) + response2 = 
client.put(f"api/v1/flows/{flow_id}", json=updated_flow.dict(), headers=logged_in_headers) assert response1.json() == response2.json() @@ -233,7 +237,7 @@ def test_update_nonexistent_flow(client: TestClient, json_flow: str, active_user description="description", data=data, ) - response = client.patch(f"api/v1/flows/{uuid}", json=updated_flow.model_dump(), headers=logged_in_headers) + response = client.patch(f"api/v1/flows/{uuid}", json=updated_flow.dict(), headers=logged_in_headers) assert response.status_code == 404 @@ -243,7 +247,8 @@ def test_delete_nonexistent_flow(client: TestClient, active_user, logged_in_head assert response.status_code == 404 -def test_read_empty_flows(client: TestClient, active_user, logged_in_headers): +def test_read_only_starter_projects(client: TestClient, active_user, logged_in_headers): response = client.get("api/v1/flows/", headers=logged_in_headers) + starter_projects = load_starter_projects() assert response.status_code == 200 - assert len(response.json()) == 0 + assert len(response.json()) == len(starter_projects) diff --git a/tests/test_embeddings_template.py b/tests/test_embeddings_template.py deleted file mode 100644 index 7334c2abd..000000000 --- a/tests/test_embeddings_template.py +++ /dev/null @@ -1,59 +0,0 @@ -from langflow.template.field.base import TemplateField -from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode - - -def test_format_jina_fields(): - field = TemplateField(name="jina") - EmbeddingFrontendNode.format_jina_fields(field) - assert field.show is True - assert field.advanced is False - - field = TemplateField(name="auth") - EmbeddingFrontendNode.format_jina_fields(field) - assert field.password is True - assert field.show is True - assert field.advanced is False - - field = TemplateField(name="jina_api_url") - EmbeddingFrontendNode.format_jina_fields(field) - assert field.show is True - assert field.advanced is True - assert field.display_name == "Jina API URL" - assert field.password 
is False - - -def test_format_openai_fields(): - field = TemplateField(name="openai") - EmbeddingFrontendNode.format_openai_fields(field) - assert field.show is True - assert field.advanced is True - assert field.display_name == "OpenAI" - - field = TemplateField(name="openai_api_key") - EmbeddingFrontendNode.format_openai_fields(field) - assert field.password is True - assert field.show is True - assert field.advanced is False - - -def test_format_field(): - field = TemplateField(name="headers") - EmbeddingFrontendNode.format_field(field) - assert field.show is False - - field = TemplateField(name="jina") - EmbeddingFrontendNode.format_field(field) - assert field.advanced is False - assert field.show is True - - field = TemplateField(name="openai") - EmbeddingFrontendNode.format_field(field) - assert field.advanced is True - assert field.show is True - assert field.display_name == "OpenAI" - - field = TemplateField(name="test_field", required=True) - EmbeddingFrontendNode.format_field(field) - assert field.advanced is False - assert field.show is True - assert field.required is True diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py index 2d8f41597..1430682ae 100644 --- a/tests/test_endpoints.py +++ b/tests/test_endpoints.py @@ -1,16 +1,11 @@ import time -import uuid -from collections import namedtuple +from uuid import uuid4 import pytest +from fastapi import status from fastapi.testclient import TestClient - -from langflow.interface.tools.constants import CUSTOM_TOOLS -from langflow.processing.process import Result -from langflow.services.auth.utils import get_password_hash -from langflow.services.database.models.api_key.model import ApiKey -from langflow.services.database.utils import session_getter -from langflow.services.deps import get_db_service, get_settings_service +from langflow.interface.custom.directory_reader.directory_reader import DirectoryReader +from langflow.services.deps import get_settings_service from 
langflow.template.frontend_node.chains import TimeTravelGuideChainNode @@ -25,7 +20,7 @@ def run_post(client, flow_id, headers, post_data): # Helper function to poll task status -def poll_task_status(client, headers, href, max_attempts=20, sleep_time=2): +def poll_task_status(client, headers, href, max_attempts=20, sleep_time=1): for _ in range(max_attempts): task_status_response = client.get( href, @@ -114,184 +109,171 @@ PROMPT_REQUEST = { } -@pytest.fixture -def created_api_key(active_user): - hashed = get_password_hash("random_key") - api_key = ApiKey( - name="test_api_key", - user_id=active_user.id, - api_key="random_key", - hashed_api_key=hashed, - ) - db_manager = get_db_service() - with session_getter(db_manager) as session: - if existing_api_key := session.query(ApiKey).filter(ApiKey.api_key == api_key.api_key).first(): - return existing_api_key - session.add(api_key) - session.commit() - session.refresh(api_key) - return api_key +# def test_process_flow_invalid_api_key(client, flow, monkeypatch): +# # Mock de process_graph_cached +# from langflow.api.v1 import endpoints +# from langflow.services.database.models.api_key import crud + +# settings_service = get_settings_service() +# settings_service.auth_settings.AUTO_LOGIN = False + +# async def mock_process_graph_cached(*args, **kwargs): +# return Result(result={}, session_id="session_id_mock") + +# def mock_update_total_uses(*args, **kwargs): +# return created_api_key + +# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) +# monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) + +# headers = {"x-api-key": "invalid_api_key"} + +# post_data = { +# "inputs": {"key": "value"}, +# "tweaks": None, +# "clear_cache": False, +# "session_id": None, +# } + +# response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) + +# assert response.status_code == 403 +# assert response.json() == {"detail": "Invalid or missing API key"} -def 
test_process_flow_invalid_api_key(client, flow, monkeypatch): - # Mock de process_graph_cached - from langflow.api.v1 import endpoints - from langflow.services.database.models.api_key import crud +# def test_process_flow_invalid_id(client, monkeypatch, created_api_key): +# async def mock_process_graph_cached(*args, **kwargs): +# return Result(result={}, session_id="session_id_mock") - settings_service = get_settings_service() - settings_service.auth_settings.AUTO_LOGIN = False +# from langflow.api.v1 import endpoints - async def mock_process_graph_cached(*args, **kwargs): - return Result(result={}, session_id="session_id_mock") +# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) - def mock_update_total_uses(*args, **kwargs): - return created_api_key +# api_key = created_api_key.api_key +# headers = {"x-api-key": api_key} - monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) - monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) +# post_data = { +# "inputs": {"key": "value"}, +# "tweaks": None, +# "clear_cache": False, +# "session_id": None, +# } - headers = {"x-api-key": "invalid_api_key"} +# invalid_id = uuid.uuid4() +# response = client.post(f"api/v1/process/{invalid_id}", headers=headers, json=post_data) - post_data = { - "inputs": {"key": "value"}, - "tweaks": None, - "clear_cache": False, - "session_id": None, - } - - response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - - assert response.status_code == 403 - assert response.json() == {"detail": "Invalid or missing API key"} +# assert response.status_code == 404 +# assert f"Flow {invalid_id} not found" in response.json()["detail"] -def test_process_flow_invalid_id(client, monkeypatch, created_api_key): - async def mock_process_graph_cached(*args, **kwargs): - return Result(result={}, session_id="session_id_mock") +# def test_process_flow_without_autologin(client, flow, monkeypatch, 
created_api_key): +# # Mock de process_graph_cached +# from langflow.api.v1 import endpoints +# from langflow.services.database.models.api_key import crud - from langflow.api.v1 import endpoints +# settings_service = get_settings_service() +# settings_service.auth_settings.AUTO_LOGIN = False - monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) +# async def mock_process_graph_cached(*args, **kwargs): +# return Result(result={}, session_id="session_id_mock") - api_key = created_api_key.api_key - headers = {"x-api-key": api_key} +# def mock_process_graph_cached_task(*args, **kwargs): +# return Result(result={}, session_id="session_id_mock") - post_data = { - "inputs": {"key": "value"}, - "tweaks": None, - "clear_cache": False, - "session_id": None, - } +# # The task function is ran like this: +# # if not self.use_celery: +# # return None, await task_func(*args, **kwargs) +# # if not hasattr(task_func, "apply"): +# # raise ValueError(f"Task function {task_func} does not have an apply method") +# # task = task_func.apply(args=args, kwargs=kwargs) +# # result = task.get() +# # return task.id, result +# # So we need to mock the task function to return a task object +# # and then mock the task object to return a result +# # maybe a named tuple would be better here +# task = namedtuple("task", ["id", "get"]) +# mock_process_graph_cached_task.apply = lambda *args, **kwargs: task( +# id="task_id_mock", get=lambda: Result(result={}, session_id="session_id_mock") +# ) - invalid_id = uuid.uuid4() - response = client.post(f"api/v1/process/{invalid_id}", headers=headers, json=post_data) +# def mock_update_total_uses(*args, **kwargs): +# return created_api_key - assert response.status_code == 404 - assert f"Flow {invalid_id} not found" in response.json()["detail"] +# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) +# monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) +# 
monkeypatch.setattr(endpoints, "process_graph_cached_task", mock_process_graph_cached_task) + +# api_key = created_api_key.api_key +# headers = {"x-api-key": api_key} + +# # Dummy POST data +# post_data = { +# "inputs": {"input": "value"}, +# "tweaks": None, +# "clear_cache": False, +# "session_id": None, +# } + +# # Make the request to the FastAPI TestClient + +# response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) + +# # Check the response +# assert response.status_code == 200, response.json() +# assert response.json()["result"] == {}, response.json() +# assert response.json()["session_id"] == "session_id_mock", response.json() -def test_process_flow_without_autologin(client, flow, monkeypatch, created_api_key): - # Mock de process_graph_cached - from langflow.api.v1 import endpoints - from langflow.services.database.models.api_key import crud +# def test_process_flow_fails_autologin_off(client, flow, monkeypatch): +# # Mock de process_graph_cached +# from langflow.api.v1 import endpoints +# from langflow.services.database.models.api_key import crud - settings_service = get_settings_service() - settings_service.auth_settings.AUTO_LOGIN = False +# settings_service = get_settings_service() +# settings_service.auth_settings.AUTO_LOGIN = False - async def mock_process_graph_cached(*args, **kwargs): - return Result(result={}, session_id="session_id_mock") +# async def mock_process_graph_cached(*args, **kwargs): +# return Result(result={}, session_id="session_id_mock") - def mock_process_graph_cached_task(*args, **kwargs): - return Result(result={}, session_id="session_id_mock") +# async def mock_update_total_uses(*args, **kwargs): +# return created_api_key - # The task function is ran like this: - # if not self.use_celery: - # return None, await task_func(*args, **kwargs) - # if not hasattr(task_func, "apply"): - # raise ValueError(f"Task function {task_func} does not have an apply method") - # task = task_func.apply(args=args, 
kwargs=kwargs) - # result = task.get() - # return task.id, result - # So we need to mock the task function to return a task object - # and then mock the task object to return a result - # maybe a named tuple would be better here - task = namedtuple("task", ["id", "get"]) - mock_process_graph_cached_task.apply = lambda *args, **kwargs: task( - id="task_id_mock", get=lambda: Result(result={}, session_id="session_id_mock") - ) +# monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) +# monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) - def mock_update_total_uses(*args, **kwargs): - return created_api_key +# headers = {"x-api-key": "api_key"} - monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) - monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) - monkeypatch.setattr(endpoints, "process_graph_cached_task", mock_process_graph_cached_task) +# # Dummy POST data +# post_data = { +# "inputs": {"key": "value"}, +# "tweaks": None, +# "clear_cache": False, +# "session_id": None, +# } - api_key = created_api_key.api_key - headers = {"x-api-key": api_key} +# # Make the request to the FastAPI TestClient - # Dummy POST data - post_data = { - "inputs": {"input": "value"}, - "tweaks": None, - "clear_cache": False, - "session_id": None, - } +# response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - # Make the request to the FastAPI TestClient - - response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - - # Check the response - assert response.status_code == 200, response.json() - assert response.json()["result"] == {}, response.json() - assert response.json()["session_id"] == "session_id_mock", response.json() - - -def test_process_flow_fails_autologin_off(client, flow, monkeypatch): - # Mock de process_graph_cached - from langflow.api.v1 import endpoints - from langflow.services.database.models.api_key import crud - - 
settings_service = get_settings_service() - settings_service.auth_settings.AUTO_LOGIN = False - - async def mock_process_graph_cached(*args, **kwargs): - return Result(result={}, session_id="session_id_mock") - - async def mock_update_total_uses(*args, **kwargs): - return created_api_key - - monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) - monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) - - headers = {"x-api-key": "api_key"} - - # Dummy POST data - post_data = { - "inputs": {"key": "value"}, - "tweaks": None, - "clear_cache": False, - "session_id": None, - } - - # Make the request to the FastAPI TestClient - - response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) - - # Check the response - assert response.status_code == 403, response.json() - assert response.json() == {"detail": "Invalid or missing API key"} +# # Check the response +# assert response.status_code == 403, response.json() +# assert response.json() == {"detail": "Invalid or missing API key"} def test_get_all(client: TestClient, logged_in_headers): response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 + settings = get_settings_service().settings + dir_reader = DirectoryReader(settings.COMPONENTS_PATH[0]) + files = dir_reader.get_files() + # json_response is a dict of dicts + all_names = [component_name for _, components in response.json().items() for component_name in components] json_response = response.json() # We need to test the custom nodes - assert "PromptTemplate" in json_response["prompts"] - # All CUSTOM_TOOLS(dict) should be in the response - assert all(tool in json_response["tools"] for tool in CUSTOM_TOOLS.keys()) + assert len(all_names) > len(files) + assert "ChatInput" in json_response["inputs"] + assert "Prompt" in json_response["inputs"] + assert "ChatOutput" in json_response["outputs"] def test_post_validate_code(client: TestClient): @@ -409,173 +391,345 
@@ def test_various_prompts(client, prompt, expected_input_variables): assert response.json()["input_variables"] == expected_input_variables -def test_basic_chat_in_process(client, added_flow, created_api_key): - # Run the /api/v1/process/{flow_id} endpoint +def test_get_vertices_flow_not_found(client, logged_in_headers): + response = client.get("/api/v1/build/nonexistent_id/vertices", headers=logged_in_headers) + assert response.status_code == 500 # Or whatever status code you've set for invalid ID + + +def test_get_vertices(client, added_flow_with_prompt_and_history, logged_in_headers): + flow_id = added_flow_with_prompt_and_history["id"] + response = client.get(f"/api/v1/build/{flow_id}/vertices", headers=logged_in_headers) + assert response.status_code == 200 + assert "ids" in response.json() + # The response should contain the list in this order + # ['ConversationBufferMemory-Lu2Nb', 'PromptTemplate-5Q0W8', 'ChatOpenAI-vy7fV', 'LLMChain-UjBh1'] + # The important part is before the - (ConversationBufferMemory, PromptTemplate, ChatOpenAI, LLMChain) + ids = [_id.split("-")[0] for _id in response.json()["ids"]] + assert ids == [ + "ChatOpenAI", + "PromptTemplate", + "ConversationBufferMemory", + ] + + +def test_build_vertex_invalid_flow_id(client, logged_in_headers): + response = client.post("/api/v1/build/nonexistent_id/vertices/vertex_id", headers=logged_in_headers) + assert response.status_code == 500 + + +def test_build_vertex_invalid_vertex_id(client, added_flow_with_prompt_and_history, logged_in_headers): + flow_id = added_flow_with_prompt_and_history["id"] + response = client.post(f"/api/v1/build/{flow_id}/vertices/invalid_vertex_id", headers=logged_in_headers) + assert response.status_code == 500 + + +def test_successful_run_no_payload(client, starter_project, created_api_key): headers = {"x-api-key": created_api_key.api_key} - post_data = {"inputs": {"text": "Hi, My name is Gabriel"}} - response = client.post( - f"api/v1/process/{added_flow.get('id')}", - 
headers=headers, - json=post_data, - ) - assert response.status_code == 200, response.json() - # Check the response - assert "Gabriel" in response.json()["result"]["text"] - # session_id should be returned - assert "session_id" in response.json() - assert response.json()["session_id"] is not None - # New request with the same session_id - # asking "What is my name?" should return "Gabriel" - post_data = { - "inputs": {"text": "What is my name?"}, - "session_id": response.json()["session_id"], + flow_id = starter_project["id"] + response = client.post(f"/api/v1/run/{flow_id}", headers=headers) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 1 + ids = [output.get("component_id") for output in outputs_dict.get("outputs")] + assert all(["ChatOutput" in _id for _id in ids]) + display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] + assert all([name in display_names for name in ["Chat Output"]]) + inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")] + + assert all([len(result) > 0 for result in inner_results]), inner_results + + +def test_successful_run_with_output_type_text(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "output_type": "text", } - response = client.post( - f"api/v1/process/{added_flow.get('id')}", - headers=headers, - 
json=post_data, - ) - assert response.status_code == 200, response.json() - assert "Gabriel" in response.json()["result"]["text"] + response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 1 + ids = [output.get("component_id") for output in outputs_dict.get("outputs")] + assert all(["TextOutput" in _id for _id in ids]), ids + display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] + assert all([name in display_names for name in ["Prompt Output"]]), display_names + inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")] + expected_result = "Langflow" + assert all([expected_result in result for result in inner_results]), inner_results -def test_basic_chat_different_session_ids(client, added_flow, created_api_key): - # Run the /api/v1/process/{flow_id} endpoint +def test_successful_run_with_output_type_any(client, starter_project, created_api_key): + # This one should have both the ChatOutput and TextOutput components headers = {"x-api-key": created_api_key.api_key} - post_data = {"inputs": {"text": "Hi, My name is Gabriel"}} - response = client.post( - f"api/v1/process/{added_flow.get('id')}", - headers=headers, - json=post_data, - ) - assert response.status_code == 200, response.json() - # Check the response - assert "Gabriel" in response.json()["result"]["text"] - # 
session_id should be returned - assert "session_id" in response.json() - assert response.json()["session_id"] is not None - session_id1 = response.json()["session_id"] - # New request with a different session_id - # asking "What is my name?" should return "Gabriel" - post_data = { - "inputs": {"text": "What is my name?"}, + flow_id = starter_project["id"] + payload = { + "output_type": "any", } - response = client.post( - f"api/v1/process/{added_flow.get('id')}", - headers=headers, - json=post_data, - ) - assert response.status_code == 200, response.json() - assert "Gabriel" not in response.json()["result"]["text"] - assert session_id1 != response.json()["session_id"] + response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 2 + ids = [output.get("component_id") for output in outputs_dict.get("outputs")] + assert all(["ChatOutput" in _id or "TextOutput" in _id for _id in ids]), ids + display_names = [output.get("component_display_name") for output in outputs_dict.get("outputs")] + assert all([name in display_names for name in ["Chat Output", "Prompt Output"]]), display_names + inner_results = [output.get("results").get("result") for output in outputs_dict.get("outputs")] + expected_result = "Langflow" + assert all([expected_result in result for result in inner_results]), inner_results -def test_basic_chat_with_two_session_ids_and_names(client, 
added_flow, created_api_key): +def test_successful_run_with_output_type_debug(client, starter_project, created_api_key): + # This one should return outputs for all components + # Let's just check the amount of outputs(there hsould be 7) headers = {"x-api-key": created_api_key.api_key} - flow_id = added_flow.get("id") - names = ["Gabriel", "John"] - session_ids = [] - - for name in names: - post_data = {"inputs": {"text": f"Hi, My name is {name}"}} - response_json = run_post(client, flow_id, headers, post_data) - - assert name in response_json["result"]["text"] - assert "session_id" in response_json - assert response_json["session_id"] is not None - - session_ids.append(response_json["session_id"]) - - for i, name in enumerate(names): - post_data = { - "inputs": {"text": "What is my name?"}, - "session_id": session_ids[i], - } - response_json = run_post(client, flow_id, headers, post_data) - - assert name in response_json["result"]["text"] + flow_id = starter_project["id"] + payload = { + "output_type": "debug", + } + response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": ""} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 7 -@pytest.mark.async_test -def test_vector_store_in_process(distributed_client, added_vector_store, created_api_key): - # Run the /api/v1/process/{flow_id} endpoint +# To test input_type wel'l just set it with output_type debug and check if the value is correct +def 
test_successful_run_with_input_type_text(client, starter_project, created_api_key): headers = {"x-api-key": created_api_key.api_key} - post_data = {"inputs": {"input": "What is Langflow?"}} - response = distributed_client.post( - f"api/v1/process/{added_vector_store.get('id')}", - headers=headers, - json=post_data, - ) - assert response.status_code == 200, response.json() - # Check the response - assert "Langflow" in response.json()["result"]["output"] - # session_id should be returned - assert "session_id" in response.json() - assert response.json()["session_id"] is not None + flow_id = starter_project["id"] + payload = { + "input_type": "text", + "output_type": "debug", + "input_value": "value1", + } + response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": "value1"} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 7 + # Now we get all components that contain TextInput in the component_id + text_input_outputs = [output for output in outputs_dict.get("outputs") if "TextInput" in output.get("component_id")] + assert len(text_input_outputs) == 2 + # Now we check if the input_value is correct + assert all([output.get("results").get("result") == "value1" for output in text_input_outputs]), text_input_outputs -# Test function without loop -@pytest.mark.async_test -def test_async_task_processing(distributed_client, added_flow, created_api_key): +# Now do the same for "chat" input type +def 
test_successful_run_with_input_type_chat(client, starter_project, created_api_key): headers = {"x-api-key": created_api_key.api_key} - post_data = {"inputs": {"text": "Hi, My name is Gabriel"}} - - # Run the /api/v1/process/{flow_id} endpoint with sync=False - response = distributed_client.post( - f"api/v1/process/{added_flow.get('id')}", - headers=headers, - json={**post_data, "sync": False}, - ) - assert response.status_code == 200, response.json() - - # Extract the task ID from the response - task = response.json().get("task") - task_id = task.get("id") - task_href = task.get("href") - assert task_id is not None - assert task_href is not None - assert task_href == f"api/v1/task/{task_id}" - - # Polling the task status using the helper function - task_status_json = poll_task_status(distributed_client, headers, task_href) - assert task_status_json is not None, "Task did not complete in time" - - # Validate that the task completed successfully and the result is as expected - assert "result" in task_status_json, task_status_json - assert "text" in task_status_json["result"], task_status_json["result"] - assert "Gabriel" in task_status_json["result"]["text"], task_status_json["result"] + flow_id = starter_project["id"] + payload = { + "input_type": "chat", + "output_type": "debug", + "input_value": "value1", + } + response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": "value1"} + assert isinstance(outputs_dict.get("outputs"), list) + assert 
len(outputs_dict.get("outputs")) == 7 + # Now we get all components that contain TextInput in the component_id + chat_input_outputs = [output for output in outputs_dict.get("outputs") if "ChatInput" in output.get("component_id")] + assert len(chat_input_outputs) == 1 + # Now we check if the input_value is correct + assert all([output.get("results").get("result") == "value1" for output in chat_input_outputs]), chat_input_outputs -# ! Deactivating this until updating the test -# Test function without loop -# @pytest.mark.async_test -# def test_async_task_processing_vector_store(client, added_vector_store, created_api_key): -# headers = {"x-api-key": created_api_key.api_key} -# post_data = {"inputs": {"input": "How do I upload examples?"}} +def test_successful_run_with_input_type_any(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_type": "any", + "output_type": "debug", + "input_value": "value1", + } + response = client.post(f"/api/v1/run/{flow_id}", headers=headers, json=payload) + assert response.status_code == status.HTTP_200_OK, response.text + # Add more assertions here to validate the response content + json_response = response.json() + assert "session_id" in json_response + assert "outputs" in json_response + outer_outputs = json_response["outputs"] + assert len(outer_outputs) == 1 + outputs_dict = outer_outputs[0] + assert len(outputs_dict) == 2 + assert "inputs" in outputs_dict + assert "outputs" in outputs_dict + assert outputs_dict.get("inputs") == {"input_value": "value1"} + assert isinstance(outputs_dict.get("outputs"), list) + assert len(outputs_dict.get("outputs")) == 7 + # Now we get all components that contain TextInput or ChatInput in the component_id + any_input_outputs = [ + output + for output in outputs_dict.get("outputs") + if "TextInput" in output.get("component_id") or "ChatInput" in output.get("component_id") + ] + assert 
len(any_input_outputs) == 3 + # Now we check if the input_value is correct + assert all([output.get("results").get("result") == "value1" for output in any_input_outputs]), any_input_outputs -# # Run the /api/v1/process/{flow_id} endpoint with sync=False -# response = client.post( -# f"api/v1/process/{added_vector_store.get('id')}", -# headers=headers, -# json={**post_data, "sync": False}, -# ) -# assert response.status_code == 200, response.json() -# assert "result" in response.json() -# assert "FAILURE" not in response.json()["result"] -# # Extract the task ID from the response -# task = response.json().get("task") -# task_id = task.get("id") -# task_href = task.get("href") -# assert task_id is not None -# assert task_href is not None -# assert task_href == f"api/v1/task/{task_id}" +def test_run_with_inputs_and_outputs(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "tweaks": {"parameter_name": "value"}, + "stream": False, + } + response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_200_OK, response.text + # Validate the response structure and content -# # Polling the task status using the helper function -# task_status_json = poll_task_status(client, headers, task_href) -# assert task_status_json is not None, "Task did not complete in time" -# # Validate that the task completed successfully and the result is as expected -# assert "result" in task_status_json, task_status_json -# assert "output" in task_status_json["result"], task_status_json["result"] -# assert "Langflow" in task_status_json["result"]["output"], task_status_json["result"] +def test_invalid_flow_id(client, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = "invalid-flow-id" + response = client.post(f"/api/v1/run/{flow_id}", 
headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + # Check if the error detail is as expected + + +def test_run_flow_with_caching_success(client: TestClient, starter_project, created_api_key): + flow_id = starter_project["id"] + headers = {"x-api-key": created_api_key.api_key} + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "tweaks": {"parameter_name": "value"}, + "stream": False, + } + response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_200_OK + data = response.json() + assert "outputs" in data + assert "session_id" in data + + +def test_run_flow_with_caching_invalid_flow_id(client: TestClient, created_api_key): + invalid_flow_id = uuid4() + headers = {"x-api-key": created_api_key.api_key} + payload = {"input_value": "", "input_type": "text", "output_type": "text", "tweaks": {}, "stream": False} + response = client.post(f"/api/v1/run/{invalid_flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + data = response.json() + assert "detail" in data + assert f"Flow {invalid_flow_id} not found" in data["detail"] + + +def test_run_flow_with_caching_invalid_input_format(client: TestClient, starter_project, created_api_key): + flow_id = starter_project["id"] + headers = {"x-api-key": created_api_key.api_key} + payload = {"input_value": {"key": "value"}, "input_type": "text", "output_type": "text", "tweaks": {}} + response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +def test_run_flow_with_session_id(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "session_id": "test-session-id", + } + response = 
client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + data = response.json() + assert {"detail": "Session test-session-id not found"} == data + + +def test_run_flow_with_invalid_session_id(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "session_id": "invalid-session-id", + } + response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_404_NOT_FOUND + data = response.json() + assert "detail" in data + assert f"Session {payload['session_id']} not found" in data["detail"] + + +def test_run_flow_with_invalid_tweaks(client, starter_project, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = starter_project["id"] + payload = { + "input_value": "value1", + "input_type": "text", + "output_type": "text", + "tweaks": {"invalid_tweak": "value"}, + } + response = client.post(f"/api/v1/run/{flow_id}", json=payload, headers=headers) + assert response.status_code == status.HTTP_200_OK diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 000000000..b3a85cf03 --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,104 @@ +from unittest.mock import MagicMock + +import pytest + +from langflow.services.deps import get_storage_service +from langflow.services.storage.service import StorageService + + +@pytest.fixture +def mock_storage_service(): + # Create a mock instance of StorageService + service = MagicMock(spec=StorageService) + # Setup mock behaviors for the service methods as needed + service.save_file.return_value = None + service.get_file.return_value = b"file content" # Binary content for files + service.list_files.return_value = ["file1.txt", "file2.jpg"] + service.delete_file.return_value = None + return service 
+ + +def test_upload_file(client, mock_storage_service, created_api_key, flow): + headers = {"x-api-key": created_api_key.api_key} + # Replace the actual storage service with the mock + client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service + + response = client.post( + f"api/v1/files/upload/{flow.id}", + files={"file": ("test.txt", b"test content")}, + headers=headers, + ) + assert response.status_code == 201 + assert response.json() == { + "flowId": str(flow.id), + "file_path": f"{flow.id}/test.txt", + } + + +def test_download_file(client, mock_storage_service, created_api_key, flow): + headers = {"x-api-key": created_api_key.api_key} + client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service + + response = client.get(f"api/v1/files/download/{flow.id}/test.txt", headers=headers) + assert response.status_code == 200 + assert response.content == b"file content" + + +def test_list_files(client, mock_storage_service, created_api_key, flow): + headers = {"x-api-key": created_api_key.api_key} + client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service + + response = client.get(f"api/v1/files/list/{flow.id}", headers=headers) + assert response.status_code == 200 + assert response.json() == {"files": ["file1.txt", "file2.jpg"]} + + +def test_delete_file(client, mock_storage_service, created_api_key, flow): + headers = {"x-api-key": created_api_key.api_key} + client.app.dependency_overrides[get_storage_service] = lambda: mock_storage_service + + response = client.delete(f"api/v1/files/delete/{flow.id}/test.txt", headers=headers) + assert response.status_code == 200 + assert response.json() == {"message": "File test.txt deleted successfully"} + + +def test_file_operations(client, created_api_key, flow): + headers = {"x-api-key": created_api_key.api_key} + flow_id = flow.id + file_name = "test.txt" + file_content = b"Hello, world!" 
+ + # Step 1: Upload the file + response = client.post( + f"api/v1/files/upload/{flow_id}", + files={"file": (file_name, file_content)}, + headers=headers, + ) + assert response.status_code == 201 + assert response.json() == { + "flowId": str(flow_id), + "file_path": f"{flow_id}/{file_name}", + } + + # Step 2: List files in the folder + response = client.get(f"api/v1/files/list/{flow_id}", headers=headers) + assert response.status_code == 200 + assert file_name in response.json()["files"] + + # Step 3: Download the file and verify its content + + response = client.get(f"api/v1/files/download/{flow_id}/{file_name}", headers=headers) + assert response.status_code == 200 + assert response.content == file_content + # the headers are application/octet-stream + assert response.headers["content-type"] == "application/octet-stream" + # mime_type is inside media_type + + # Step 4: Delete the file + response = client.delete(f"api/v1/files/delete/{flow_id}/{file_name}", headers=headers) + assert response.status_code == 200 + assert response.json() == {"message": f"File {file_name} deleted successfully"} + + # Verify that the file is indeed deleted + response = client.get(f"api/v1/files/list/{flow_id}", headers=headers) + assert file_name not in response.json()["files"] diff --git a/tests/test_graph.py b/tests/test_graph.py index cca6d4f49..8395c304f 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,14 +1,9 @@ import copy import json -import os import pickle -from pathlib import Path from typing import Type, Union import pytest -from langchain.agents import AgentExecutor -from langchain.chains.base import Chain -from langchain.llms.fake import FakeListLLM from langflow.graph import Graph from langflow.graph.edge.base import Edge @@ -21,10 +16,8 @@ from langflow.graph.graph.utils import ( update_target_handle, update_template, ) -from langflow.graph.utils import UnbuiltObject from langflow.graph.vertex.base import Vertex -from langflow.graph.vertex.types import 
FileToolVertex, LLMVertex, ToolkitVertex -from langflow.processing.process import get_result_and_thought +from langflow.initial_setup.setup import load_starter_projects from langflow.utils.payload import get_root_vertex # Test cases for the graph module @@ -82,8 +75,10 @@ def test_graph_structure(basic_graph): assert isinstance(node, Vertex) for edge in basic_graph.edges: assert isinstance(edge, Edge) - assert edge.source_id in basic_graph.vertex_map.keys() - assert edge.target_id in basic_graph.vertex_map.keys() + source_vertex = basic_graph.get_vertex(edge.source_id) + target_vertex = basic_graph.get_vertex(edge.target_id) + assert source_vertex in basic_graph.vertices + assert target_vertex in basic_graph.vertices def test_circular_dependencies(basic_graph): @@ -152,55 +147,6 @@ def test_get_node_neighbors_basic(basic_graph): assert any("OpenAI" in neighbor.data["type"] for neighbor, val in neighbors.items() if val) -# def test_get_node_neighbors_complex(complex_graph): -# """Test getting node neighbors""" -# assert isinstance(complex_graph, Graph) -# # Get root node -# root = get_root_node(complex_graph) -# assert root is not None -# neighbors = complex_graph.get_nodes_with_target(root) -# assert neighbors is not None -# # Neighbors should be a list of nodes -# assert isinstance(neighbors, list) -# # Root Node is an Agent, it requires an LLMChain and tools -# # We need to check if there is a Chain in the one of the neighbors' -# assert any("Chain" in neighbor.data["type"] for neighbor in neighbors) -# # assert Tool is in the neighbors -# assert any("Tool" in neighbor.data["type"] for neighbor in neighbors) -# # Now on to the Chain's neighbors -# chain = next(neighbor for neighbor in neighbors if "Chain" in neighbor.data["type"]) -# chain_neighbors = complex_graph.get_nodes_with_target(chain) -# assert chain_neighbors is not None -# # Check if there is a LLM in the chain's neighbors -# assert any("OpenAI" in neighbor.data["type"] for neighbor in chain_neighbors) 
-# # Chain should have a Prompt as a neighbor -# assert any("Prompt" in neighbor.data["type"] for neighbor in chain_neighbors) -# # Now on to the Tool's neighbors -# tool = next(neighbor for neighbor in neighbors if "Tool" in neighbor.data["type"]) -# tool_neighbors = complex_graph.get_nodes_with_target(tool) -# assert tool_neighbors is not None -# # Check if there is an Agent in the tool's neighbors -# assert any("Agent" in neighbor.data["type"] for neighbor in tool_neighbors) -# # This Agent has a Tool that has a PythonFunction as func -# agent = next( -# neighbor for neighbor in tool_neighbors if "Agent" in neighbor.data["type"] -# ) -# agent_neighbors = complex_graph.get_nodes_with_target(agent) -# assert agent_neighbors is not None -# # Check if there is a Tool in the agent's neighbors -# assert any("Tool" in neighbor.data["type"] for neighbor in agent_neighbors) -# # This Tool has a PythonFunction as func -# tool = next( -# neighbor for neighbor in agent_neighbors if "Tool" in neighbor.data["type"] -# ) -# tool_neighbors = complex_graph.get_nodes_with_target(tool) -# assert tool_neighbors is not None -# # Check if there is a PythonFunction in the tool's neighbors -# assert any( -# "PythonFunctionTool" in neighbor.data["type"] for neighbor in tool_neighbors -# ) - - def test_get_node(basic_graph): """Test getting a single node""" node_id = basic_graph.vertices[0].id @@ -222,7 +168,6 @@ def test_build_edges(basic_graph): assert len(basic_graph.edges) == len(basic_graph._edges) for edge in basic_graph.edges: assert isinstance(edge, Edge) - assert isinstance(edge.source_id, str) assert isinstance(edge.target_id, str) @@ -281,83 +226,11 @@ def test_build_params(basic_graph): assert "memory" in root.params -@pytest.mark.asyncio -async def test_build(basic_graph): - """Test Node's build method""" - await assert_agent_was_built(basic_graph) - - -async def assert_agent_was_built(graph): - """Assert that the agent was built""" - assert isinstance(graph, Graph) - # Now 
we test the build method - # Build the Agent - result = await graph.build() - # The agent should be a AgentExecutor - assert isinstance(result, Chain) - - -@pytest.mark.asyncio -async def test_llm_node_build(basic_graph): - llm_node = get_node_by_type(basic_graph, LLMVertex) - assert llm_node is not None - built_object = await llm_node.build() - assert built_object is not UnbuiltObject() - - -@pytest.mark.asyncio -async def test_toolkit_node_build(client, openapi_graph): - # Write a file to the disk - file_path = "api-with-examples.yaml" - with open(file_path, "w") as f: - f.write("openapi: 3.0.0") - - toolkit_node = get_node_by_type(openapi_graph, ToolkitVertex) - assert toolkit_node is not None - built_object = await toolkit_node.build() - assert built_object is not UnbuiltObject - # Remove the file - os.remove(file_path) - assert not Path(file_path).exists() - - -@pytest.mark.asyncio -async def test_file_tool_node_build(client, openapi_graph): - file_path = "api-with-examples.yaml" - with open(file_path, "w") as f: - f.write("openapi: 3.0.0") - - assert Path(file_path).exists() - file_tool_node = get_node_by_type(openapi_graph, FileToolVertex) - assert file_tool_node is not UnbuiltObject and file_tool_node is not None - built_object = await file_tool_node.build() - assert built_object is not UnbuiltObject - # Remove the file - os.remove(file_path) - assert not Path(file_path).exists() - - -@pytest.mark.asyncio -async def test_get_result_and_thought(basic_graph): - """Test the get_result_and_thought method""" - responses = [ - "Final Answer: I am a response", - ] - message = {"input": "Hello"} - # Find the node that is an LLMNode and change the - # _built_object to a FakeListLLM - llm_node = get_node_by_type(basic_graph, LLMVertex) - assert llm_node is not None - llm_node._built_object = FakeListLLM(responses=responses) - llm_node._built = True - langchain_object = await basic_graph.build() - # assert all nodes are built - assert all(node._built for node in 
basic_graph.vertices) - # now build again and check if FakeListLLM was used - - # Get the result and thought - result = get_result_and_thought(langchain_object, message) - assert isinstance(result, dict) +# def test_wrapper_node_build(openapi_graph): +# wrapper_node = get_node_by_type(openapi_graph, WrapperVertex) +# assert wrapper_node is not None +# built_object = wrapper_node.build() +# assert built_object is not None def test_find_last_node(grouped_chat_json_flow): @@ -471,12 +344,10 @@ def test_update_template(sample_template, sample_nodes): node2_updated = next((n for n in nodes_copy if n["id"] == "node2"), None) node3_updated = next((n for n in nodes_copy if n["id"] == "node3"), None) - assert node1_updated is not None assert node1_updated["data"]["node"]["template"]["some_field"]["show"] is True assert node1_updated["data"]["node"]["template"]["some_field"]["advanced"] is False assert node1_updated["data"]["node"]["template"]["some_field"]["display_name"] == "Name1" - assert node2_updated is not None assert node2_updated["data"]["node"]["template"]["other_field"]["show"] is False assert node2_updated["data"]["node"]["template"]["other_field"]["advanced"] is True assert node2_updated["data"]["node"]["template"]["other_field"]["display_name"] == "DisplayName2" @@ -537,27 +408,37 @@ def test_update_source_handle(): @pytest.mark.asyncio async def test_pickle_graph(json_vector_store): - loaded_json = json.loads(json_vector_store) - graph = Graph.from_payload(loaded_json) + starter_projects = load_starter_projects() + data = starter_projects[0]["data"] + graph = Graph.from_payload(data) assert isinstance(graph, Graph) - first_result = await graph.build() - assert isinstance(first_result, AgentExecutor) pickled = pickle.dumps(graph) - assert pickled is not UnbuiltObject + assert pickled is not None unpickled = pickle.loads(pickled) - assert unpickled is not UnbuiltObject - result = await unpickled.build() - assert isinstance(result, AgentExecutor) + assert 
unpickled is not None @pytest.mark.asyncio async def test_pickle_each_vertex(json_vector_store): - loaded_json = json.loads(json_vector_store) - graph = Graph.from_payload(loaded_json) + starter_projects = load_starter_projects() + data = starter_projects[0]["data"] + graph = Graph.from_payload(data) assert isinstance(graph, Graph) for vertex in graph.vertices: await vertex.build() pickled = pickle.dumps(vertex) - assert pickled is not UnbuiltObject + assert pickled is not None unpickled = pickle.loads(pickled) - assert unpickled is not UnbuiltObject + assert unpickled is not None + + +@pytest.mark.asyncio +async def test_build_ordering(complex_graph_with_groups): + sorted_vertices = complex_graph_with_groups.sort_vertices(stop_component_id="ChatInput-Ay8QQ") + assert sorted_vertices == [ + "ChatInput-Ay8QQ", + "RecordsAsText-vkx2A", + "FileLoader-Vo1Cq", + ] + + sorted_vertices = complex_graph_with_groups.sort_vertices() diff --git a/tests/test_helper_components.py b/tests/test_helper_components.py new file mode 100644 index 000000000..8414cdac5 --- /dev/null +++ b/tests/test_helper_components.py @@ -0,0 +1,80 @@ +from langchain_core.documents import Document +from langflow.components import helpers +from langflow.interface.custom.utils import build_custom_component_template +from langflow.schema import Record + + +def test_update_record_component(): + # Arrange + update_record_component = helpers.UpdateRecordComponent() + + # Act + new_data = {"new_key": "new_value"} + existing_record = Record(data={"existing_key": "existing_value"}) + result = update_record_component.build(existing_record, new_data) + assert result.data == {"existing_key": "existing_value", "new_key": "new_value"} + assert result.existing_key == "existing_value" + assert result.new_key == "new_value" + + +def test_document_to_record_component(): + # Arrange + document_to_record_component = helpers.DocumentToRecordComponent() + + # Act + # Replace with your actual test data + document = 
Document(page_content="key: value", metadata={"url": "https://example.com"}) + result = document_to_record_component.build(document) + + # Assert + # Replace with your actual expected result + assert result == [Record(data={"text": "key: value", "url": "https://example.com"})] + + +def test_uuid_generator_component(): + # Arrange + uuid_generator_component = helpers.UUIDGeneratorComponent() + uuid_generator_component.code = open(helpers.IDGenerator.__file__, "r").read() + + frontend_node, _ = build_custom_component_template(uuid_generator_component) + + # Act + build_config = frontend_node.get("template") + field_name = "unique_id" + build_config = uuid_generator_component.update_build_config(build_config, None, field_name) + unique_id = build_config["unique_id"]["value"] + result = uuid_generator_component.build(unique_id) + + # Assert + # UUID should be a string of length 36 + assert isinstance(result, str) + assert len(result) == 36 + + +def test_records_as_text_component(): + # Arrange + records_as_text_component = helpers.RecordsToTextComponent() + + # Act + # Replace with your actual test data + records = [Record(data={"key": "value", "bacon": "eggs"})] + template = "Data:{data} -- Bacon:{bacon}" + result = records_as_text_component.build(records, template=template) + + # Assert + # Replace with your actual expected result + assert result == "Data:{'key': 'value', 'bacon': 'eggs'} -- Bacon:eggs" + + +def test_text_to_record_component(): + # Arrange + text_to_record_component = helpers.CreateRecordComponent() + + # Act + # Replace with your actual test data + dict_with_text = {"field_1": {"key": "value"}} + result = text_to_record_component.build(number_of_fields=1, **dict_with_text) + + # Assert + # Replace with your actual expected result + assert result == Record(data={"key": "value"}) diff --git a/tests/test_initial_setup.py b/tests/test_initial_setup.py new file mode 100644 index 000000000..33aaf8df3 --- /dev/null +++ b/tests/test_initial_setup.py @@ -0,0 
+1,92 @@ +from datetime import datetime + +import pytest +from langflow.graph.graph.base import Graph +from langflow.graph.schema import RunOutputs +from langflow.initial_setup.setup import ( + STARTER_FOLDER_NAME, + create_or_update_starter_projects, + get_project_data, + load_starter_projects, +) +from langflow.memory import delete_messages +from langflow.services.database.models.flow.model import Flow +from langflow.services.deps import session_scope +from sqlalchemy import func +from sqlmodel import select + + +def test_load_starter_projects(): + projects = load_starter_projects() + assert isinstance(projects, list) + assert all(isinstance(project, dict) for project in projects) + + +def test_get_project_data(): + projects = load_starter_projects() + for _, project in projects: + ( + project_name, + project_description, + project_is_component, + updated_at_datetime, + project_data, + project_icon, + project_icon_bg_color, + ) = get_project_data(project) + assert isinstance(project_name, str) + assert isinstance(project_description, str) + assert isinstance(project_is_component, bool) + assert isinstance(updated_at_datetime, datetime) + assert isinstance(project_data, dict) + assert isinstance(project_icon, str) or project_icon is None + assert isinstance(project_icon_bg_color, str) or project_icon_bg_color is None + + +def test_create_or_update_starter_projects(client): + with session_scope() as session: + # Run the function to create or update projects + create_or_update_starter_projects() + + # Get the number of projects returned by load_starter_projects + num_projects = len(load_starter_projects()) + + # Get the number of projects in the database + num_db_projects = session.exec(select(func.count(Flow.id)).where(Flow.folder == STARTER_FOLDER_NAME)).one() + + # Check that the number of projects in the database is the same as the number of projects returned by load_starter_projects + assert num_db_projects == num_projects + + +@pytest.mark.asyncio +async def 
test_starter_project_can_run_successfully(client): + with session_scope() as session: + # Run the function to create or update projects + create_or_update_starter_projects() + + # Get the number of projects returned by load_starter_projects + num_projects = len(load_starter_projects()) + + # Get the number of projects in the database + num_db_projects = session.exec(select(func.count(Flow.id)).where(Flow.folder == STARTER_FOLDER_NAME)).one() + + # Check that the number of projects in the database is the same as the number of projects returned by load_starter_projects + assert num_db_projects == num_projects + + # Get all the starter projects + projects = session.exec(select(Flow).where(Flow.folder == STARTER_FOLDER_NAME)).all() + + graphs: list[tuple[str, Graph]] = [ + (project.name, Graph.from_payload(project.data, flow_id=project.id)) + for project in projects + if "Document" not in project.name or "RAG" not in project.name + ] + assert len(graphs) == len(projects) + for name, graph in graphs: + outputs = await graph.arun( + inputs={}, + outputs=[], + session_id="test", + ) + assert all(isinstance(output, RunOutputs) for output in outputs), f"Project {name} error: {outputs}" + delete_messages(session_id="test") diff --git a/tests/test_loading.py b/tests/test_loading.py index eb3987e93..b95791139 100644 --- a/tests/test_loading.py +++ b/tests/test_loading.py @@ -1,37 +1,19 @@ -import json - import pytest -from langchain.chains.base import Chain + from langflow.graph import Graph from langflow.processing.load import load_flow_from_json -from langflow.utils.payload import get_root_vertex def test_load_flow_from_json(): """Test loading a flow from a json file""" loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH) assert loaded is not None - assert isinstance(loaded, Chain) + assert isinstance(loaded, Graph) def test_load_flow_from_json_with_tweaks(): """Test loading a flow from a json file and applying tweaks""" - tweaks = {"dndnode_82": {"model_name": "test 
model"}} + tweaks = {"dndnode_82": {"model_name": "gpt-3.5-turbo-16k-0613"}} loaded = load_flow_from_json(pytest.BASIC_EXAMPLE_PATH, tweaks=tweaks) assert loaded is not None - assert isinstance(loaded, Chain) - assert loaded.llm.model_name == "test model" - - -def test_get_root_vertex(): - with open(pytest.BASIC_EXAMPLE_PATH, "r") as f: - flow_graph = json.load(f) - data_graph = flow_graph["data"] - nodes = data_graph["nodes"] - edges = data_graph["edges"] - graph = Graph(nodes, edges) - root = get_root_vertex(graph) - assert root is not None - assert hasattr(root, "id") - assert hasattr(root, "data") - assert hasattr(root, "data") + assert isinstance(loaded, Graph) diff --git a/tests/test_login.py b/tests/test_login.py index 399c7b761..29a8a852e 100644 --- a/tests/test_login.py +++ b/tests/test_login.py @@ -1,8 +1,8 @@ +import pytest +from langflow.services.auth.utils import get_password_hash +from langflow.services.database.models.user import User from langflow.services.database.utils import session_getter from langflow.services.deps import get_db_service -import pytest -from langflow.services.database.models.user import User -from langflow.services.auth.utils import get_password_hash @pytest.fixture diff --git a/tests/test_process.py b/tests/test_process.py index c8e4ec9cc..2548e9215 100644 --- a/tests/test_process.py +++ b/tests/test_process.py @@ -1,4 +1,5 @@ import pytest + from langflow.processing.process import process_tweaks from langflow.services.deps import get_session_service @@ -164,6 +165,70 @@ def test_multiple_tweaks(): assert result == expected_result +# Test twekas that just pass the param and value but no node id. 
+# This is a new feature that was added to the process_tweaks function +def test_tweak_no_node_id(): + graph_data = { + "data": { + "nodes": [ + { + "id": "node1", + "data": { + "node": { + "template": { + "param1": {"value": 1}, + "param2": {"value": 2}, + } + } + }, + }, + { + "id": "node2", + "data": { + "node": { + "template": { + "param1": {"value": 3}, + "param2": {"value": 4}, + } + } + }, + }, + ] + } + } + tweaks = {"param1": 5} + expected_result = { + "data": { + "nodes": [ + { + "id": "node1", + "data": { + "node": { + "template": { + "param1": {"value": 5}, + "param2": {"value": 2}, + } + } + }, + }, + { + "id": "node2", + "data": { + "node": { + "template": { + "param1": {"value": 5}, + "param2": {"value": 4}, + } + } + }, + }, + ] + } + } + result = process_tweaks(graph_data, tweaks) + assert result == expected_result + + def test_tweak_not_in_template(): graph_data = { "data": { @@ -217,11 +282,11 @@ async def test_load_langchain_object_with_no_cached_session(client, basic_graph_ session_service = get_session_service() session_id1 = "non-existent-session-id" session_id = session_service.build_key(session_id1, basic_graph_data) - graph1, artifacts1 = await session_service.load_session(session_id, basic_graph_data) + graph1, artifacts1 = await session_service.load_session(session_id, data_graph=basic_graph_data, flow_id="flow_id") # Clear the cache session_service.clear_session(session_id) # Use the new session_id to get the langchain_object again - graph2, artifacts2 = await session_service.load_session(session_id, basic_graph_data) + graph2, artifacts2 = await session_service.load_session(session_id, data_graph=basic_graph_data, flow_id="flow_id") assert id(graph1) != id(graph2) # Since the cache was cleared, objects should be different @@ -232,8 +297,8 @@ async def test_load_langchain_object_without_session_id(client, basic_graph_data # Provide a non-existent session_id session_service = get_session_service() session_id1 = None - graph1, artifacts1 
= await session_service.load_session(session_id1, basic_graph_data) + graph1, artifacts1 = await session_service.load_session(session_id1, data_graph=basic_graph_data, flow_id="flow_id") # Use the new session_id to get the langchain_object again - graph2, artifacts2 = await session_service.load_session(session_id1, basic_graph_data) + graph2, artifacts2 = await session_service.load_session(session_id1, data_graph=basic_graph_data, flow_id="flow_id") assert graph1 == graph2 diff --git a/tests/test_prompts_template.py b/tests/test_prompts_template.py deleted file mode 100644 index 505b9db2f..000000000 --- a/tests/test_prompts_template.py +++ /dev/null @@ -1,11 +0,0 @@ -from fastapi.testclient import TestClient -from langflow.services.deps import get_settings_service - - -def test_prompts_settings(client: TestClient, logged_in_headers): - settings_service = get_settings_service() - response = client.get("api/v1/all", headers=logged_in_headers) - assert response.status_code == 200 - json_response = response.json() - prompts = json_response["prompts"] - assert set(prompts.keys()) == set(settings_service.settings.PROMPTS) diff --git a/tests/test_record.py b/tests/test_record.py new file mode 100644 index 000000000..45afaa5af --- /dev/null +++ b/tests/test_record.py @@ -0,0 +1,139 @@ +from langchain_core.documents import Document + +from langflow.schema import Record + + +def test_record_initialization(): + record = Record(text_key="msg", data={"msg": "Hello, World!", "extra": "value"}) + assert record.msg == "Hello, World!" 
+ assert record.extra == "value" + + +def test_validate_data_with_extra_keys(): + record = Record(dummy_key="dummy", data={"key": "value"}) + assert record.data["dummy_key"] == "dummy" + assert "dummy_key" in record.data + assert record.key == "value" + + +def test_conversion_to_document(): + record = Record(data={"text": "Sample text", "meta": "data"}) + document = record.to_lc_document() + assert document.page_content == "Sample text" + assert document.metadata == {"meta": "data"} + + +def test_conversion_from_document(): + document = Document(page_content="Doc content", metadata={"meta": "info"}) + record = Record.from_document(document) + assert record.text == "Doc content" + assert record.meta == "info" + + +def test_add_method_for_strings(): + record1 = Record(data={"text": "Hello"}) + record2 = Record(data={"text": " World"}) + combined = record1 + record2 + assert combined.text == "Hello World" + + +def test_add_method_for_integers(): + record1 = Record(data={"number": 5}) + record2 = Record(data={"number": 10}) + combined = record1 + record2 + assert combined.number == 15 + + +def test_add_method_with_non_overlapping_keys(): + record1 = Record(data={"text": "Hello"}) + record2 = Record(data={"number": 10}) + combined = record1 + record2 + assert combined.text == "Hello" + assert combined.number == 10 + + +def test_custom_attribute_get_set_del(): + record = Record() + record.custom_attr = "custom_value" + assert record.custom_attr == "custom_value" + del record.custom_attr + assert record.custom_attr == record.default_value + + +def test_deep_copy(): + import copy + + record1 = Record(data={"text": "Hello", "number": 10}) + record2 = copy.deepcopy(record1) + assert record2.text == "Hello" + assert record2.number == 10 + record2.text = "World" + assert record1.text == "Hello" # Ensure original is unchanged + + +def test_custom_attribute_setting_and_getting(): + record = Record() + record.dynamic_attribute = "Dynamic Value" + assert record.dynamic_attribute 
== "Dynamic Value" + + +def test_str_and_dir_methods(): + record = Record(text_key="text", data={"text": "Test Text", "key": "value"}) + assert "Test Text" in str(record) + assert "key" in dir(record) + assert "data" in dir(record) + + +def test_dir_includes_data_keys(): + record = Record(data={"text": "Hello", "new_attr": "value"}) + dir_output = dir(record) + + # Check for standard attributes + assert "data" in dir_output + assert "text_key" in dir_output + assert "__add__" in dir_output # Checking for a method + + # Check for dynamic attributes from data + assert "text" in dir_output + assert "new_attr" in dir_output + + # Optionally, verify that dynamically added attributes are listed + record.dynamic_attr = "dynamic" + assert "dynamic_attr" in dir_output or "dynamic_attr" in dir(record) # To account for the change + + +def test_dir_reflects_attribute_deletion(): + record = Record(data={"removable": "I can be removed"}) + assert "removable" in dir(record) + + # Delete the attribute and check again + del record.removable + assert "removable" not in dir(record) + + +def test_get_text_with_text_key(): + data = {"text": "Hello, World!"} + schema = Record(data=data, text_key="text", default_value="default") + result = schema.get_text() + assert result == "Hello, World!" 
+ + +def test_get_text_without_text_key(): + data = {"other_key": "Hello, World!"} + schema = Record(data=data, text_key="text", default_value="default") + result = schema.get_text() + assert result == "default" + + +def test_get_text_with_empty_data(): + data = {} + schema = Record(data=data, text_key="text", default_value="default") + result = schema.get_text() + assert result == "default" + + +def test_get_text_with_none_data(): + data = None + schema = Record(data=data, text_key="text", default_value="default") + result = schema.get_text() + assert result == "default" diff --git a/tests/test_setup_superuser.py b/tests/test_setup_superuser.py index 03d3882fb..c2172429b 100644 --- a/tests/test_setup_superuser.py +++ b/tests/test_setup_superuser.py @@ -1,6 +1,9 @@ from unittest.mock import MagicMock, patch -from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD +from langflow.services.settings.constants import ( + DEFAULT_SUPERUSER, + DEFAULT_SUPERUSER_PASSWORD, +) from langflow.services.utils import teardown_superuser # @patch("langflow.services.deps.get_session") @@ -119,11 +122,11 @@ def test_teardown_superuser_no_default_superuser(mock_get_session, mock_get_sett mock_session = MagicMock() mock_user = MagicMock() mock_user.is_superuser = False - mock_session.exec.return_value.filter.return_value.first.return_value = mock_user + mock_session.query.return_value.filter.return_value.first.return_value = mock_user mock_get_session.return_value = [mock_session] teardown_superuser(mock_settings_service, mock_session) - mock_session.exec.assert_called_once() + mock_session.query.assert_not_called() mock_session.delete.assert_not_called() mock_session.commit.assert_not_called() diff --git a/tests/test_user.py b/tests/test_user.py index d7e814845..5f894e801 100644 --- a/tests/test_user.py +++ b/tests/test_user.py @@ -1,7 +1,6 @@ from datetime import datetime import pytest - from langflow.services.auth.utils import 
create_super_user, get_password_hash from langflow.services.database.models.user import UserUpdate from langflow.services.database.models.user.model import User @@ -90,7 +89,7 @@ def test_data_consistency_after_update(client, active_user, logged_in_headers, s user_id = active_user.id update_data = UserUpdate(is_active=False) - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=super_user_headers) + response = client.patch(f"/api/v1/users/{user_id}", json=update_data.dict(), headers=super_user_headers) assert response.status_code == 200, response.json() # Fetch the updated user from the database @@ -117,7 +116,7 @@ def test_inactive_user(client): username="inactiveuser", password=get_password_hash("testpassword"), is_active=False, - last_login_at=datetime.now(), + last_login_at=datetime(2023, 1, 1, 0, 0, 0), ) session.add(user) session.commit() @@ -164,13 +163,13 @@ def test_patch_user(client, active_user, logged_in_headers): username="newname", ) - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) + response = client.patch(f"/api/v1/users/{user_id}", json=update_data.dict(), headers=logged_in_headers) assert response.status_code == 200, response.json() update_data = UserUpdate( profile_image="new_image", ) - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) + response = client.patch(f"/api/v1/users/{user_id}", json=update_data.dict(), headers=logged_in_headers) assert response.status_code == 200, response.json() @@ -182,7 +181,7 @@ def test_patch_reset_password(client, active_user, logged_in_headers): response = client.patch( f"/api/v1/users/{user_id}/reset-password", - json=update_data.model_dump(), + json=update_data.dict(), headers=logged_in_headers, ) assert response.status_code == 200, response.json() @@ -198,13 +197,13 @@ def test_patch_user_wrong_id(client, active_user, logged_in_headers): 
username="newname", ) - response = client.patch(f"/api/v1/users/{user_id}", json=update_data.model_dump(), headers=logged_in_headers) + response = client.patch(f"/api/v1/users/{user_id}", json=update_data.dict(), headers=logged_in_headers) assert response.status_code == 422, response.json() json_response = response.json() detail = json_response["detail"] - assert detail[0]["type"] == "uuid_parsing" - assert detail[0]["loc"] == ["path", "user_id"] - assert detail[0]["input"] == "wrong_id" + error = detail[0] + assert error["loc"] == ["path", "user_id"] + assert error["type"] == "uuid_parsing" def test_delete_user(client, test_user, super_user_headers): @@ -220,9 +219,9 @@ def test_delete_user_wrong_id(client, test_user, super_user_headers): assert response.status_code == 422 json_response = response.json() detail = json_response["detail"] - assert detail[0]["type"] == "uuid_parsing" - assert detail[0]["loc"] == ["path", "user_id"] - assert detail[0]["input"] == "wrong_id" + error = detail[0] + assert error["loc"] == ["path", "user_id"] + assert error["type"] == "uuid_parsing" def test_normal_user_cant_delete_user(client, test_user, logged_in_headers): diff --git a/tests/test_websocket.py b/tests/test_websocket.py deleted file mode 100644 index c4c9ee322..000000000 --- a/tests/test_websocket.py +++ /dev/null @@ -1,50 +0,0 @@ -from fastapi import WebSocketDisconnect -from fastapi.testclient import TestClient - -# from langflow.services.chat.manager import ChatService - -import pytest - - -def test_init_build(client, active_user, logged_in_headers): - response = client.post( - "api/v1/build/init/test", - json={"id": "test", "data": {"key": "value"}}, - headers=logged_in_headers, - ) - assert response.status_code == 201 - assert response.json() == {"flowId": "test"} - - -# def test_stream_build(client): -# client.post( -# "api/v1/build/init", json={"id": "stream_test", "data": {"key": "value"}} -# ) - -# # Test the stream -# response = 
client.get("api/v1/build/stream/stream_test") -# assert response.status_code == 200 -# assert response.headers["content-type"] == "text/event-stream; charset=utf-8" - - -def test_websocket_endpoint(client: TestClient, active_user, logged_in_headers): - # Assuming your websocket_endpoint uses chat_service which caches data from stream_build - access_token = logged_in_headers["Authorization"].split(" ")[1] - with pytest.raises(WebSocketDisconnect): - with client.websocket_connect(f"api/v1/chat/non_existing_client_id?token={access_token}") as websocket: - websocket.send_json({"type": "test"}) - data = websocket.receive_json() - assert "Please, build the flow before sending messages" in data["message"] - - -def test_websocket_endpoint_after_build(client, basic_graph_data): - # Assuming your websocket_endpoint uses chat_service which caches data from stream_build - client.post("api/v1/build/init", json=basic_graph_data) - client.get("api/v1/build/stream/websocket_test") - - # There should be more to test here, but it depends on the inner workings of your websocket handler - # and how your chat_service and other classes behave. The following is just an example structure. 
- with pytest.raises(WebSocketDisconnect): - with client.websocket_connect("api/v1/chat/websocket_test") as websocket: - websocket.send_json({"input": "test"}) - websocket.receive_json() diff --git a/tests/test_experimental_components.py b/tests/test_experimental_components.py new file mode 100644 index 000000000..a4ee47729 --- /dev/null +++ b/tests/test_experimental_components.py @@ -0,0 +1,15 @@ +from langflow.components import experimental + + +def test_python_function_component(): + # Arrange + python_function_component = experimental.PythonFunctionComponent() + + # Act + # function must be a string representation + function = "def function():\n return 'Hello, World!'" + # result is the callable function + result = python_function_component.build(function) + + # Assert + assert result() == "Hello, World!"