diff --git a/.devcontainer/demo/devcontainer.json b/.devcontainer/demo/devcontainer.json index 26ecd86d3..0fb998b81 100644 --- a/.devcontainer/demo/devcontainer.json +++ b/.devcontainer/demo/devcontainer.json @@ -1,33 +1,33 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the // README at: https://github.com/devcontainers/templates/tree/main/src/universal { - "name": "LangChain Demo Container", - // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile - "image": "mcr.microsoft.com/devcontainers/python:3.10", - "features": { - "ghcr.io/devcontainers/features/aws-cli:1": {}, - "ghcr.io/devcontainers/features/docker-in-docker": {}, - "ghcr.io/devcontainers/features/node": {} - }, - "customizations": { - "vscode": { - "extensions": [ - "actboy168.tasks", - "GitHub.copilot", - "ms-python.python", - "eamodio.gitlens", - "GitHub.vscode-pull-request-github" - ] - } - }, - // Features to add to the dev container. More info: https://containers.dev/features. - // "features": {}, - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], - // Use 'postCreateCommand' to run commands after the container is created. - "postCreateCommand": "pipx install 'langflow>=0.0.33' && langflow --host 0.0.0.0" - // Configure tool-specific properties. - // "customizations": {}, - // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. - // "remoteUser": "root" -} \ No newline at end of file + "name": "Langflow Demo Container", + // Or use a Dockerfile or Docker Compose file. 
More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/python:3.10", + "features": { + "ghcr.io/devcontainers/features/aws-cli:1": {}, + "ghcr.io/devcontainers/features/docker-in-docker": {}, + "ghcr.io/devcontainers/features/node": {} + }, + "customizations": { + "vscode": { + "extensions": [ + "actboy168.tasks", + "GitHub.copilot", + "ms-python.python", + "eamodio.gitlens", + "GitHub.vscode-pull-request-github" + ] + } + }, + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "pipx install 'langflow>=0.0.33' && langflow --host 0.0.0.0" + // Configure tool-specific properties. + // "customizations": {}, + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" +} diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index aed1ec954..90966fb38 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,39 +1,41 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the // README at: https://github.com/devcontainers/templates/tree/main/src/universal { - "name": "LangChain Dev Container", - // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile - "image": "mcr.microsoft.com/devcontainers/python:1-3.10-bullseye", - - // Features to add to the dev container. More info: https://containers.dev/features. - "features": { - "ghcr.io/devcontainers/features/node": {}, - "ghcr.io/devcontainers-contrib/features/poetry": {} - }, + "name": "Langflow Dev Container", + // Or use a Dockerfile or Docker Compose file. 
More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/python:1-3.10-bullseye", - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], + // Features to add to the dev container. More info: https://containers.dev/features. + "features": { + "ghcr.io/devcontainers/features/node": {}, + "ghcr.io/devcontainers-contrib/features/poetry": {} + }, - // Use 'postCreateCommand' to run commands after the container is created. - "postCreateCommand": "make setup_devcontainer", + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], - "containerEnv": { - "POETRY_VIRTUALENVS_IN_PROJECT": "true" - }, + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "make install_frontend && make install_backend", - // Configure tool-specific properties. - "customizations": { - "vscode": {"extensions": [ - "actboy168.tasks", - "GitHub.copilot", - "ms-python.python", - "sourcery.sourcery", - "eamodio.gitlens", - "ms-vscode.makefile-tools", - "GitHub.vscode-pull-request-github" - ]} - } + "containerEnv": { + "POETRY_VIRTUALENVS_IN_PROJECT": "true" + }, - // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. - // "remoteUser": "root" + // Configure tool-specific properties. + "customizations": { + "vscode": { + "extensions": [ + "actboy168.tasks", + "GitHub.copilot", + "ms-python.python", + "sourcery.sourcery", + "eamodio.gitlens", + "ms-vscode.makefile-tools", + "GitHub.vscode-pull-request-github" + ] + } + } + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. 
+ // "remoteUser": "root" } diff --git a/.env.example b/.env.example index 6c6d2f667..b3e592bdf 100644 --- a/.env.example +++ b/.env.example @@ -45,3 +45,21 @@ LANGFLOW_OPEN_BROWSER= # Values: true, false # Example: LANGFLOW_REMOVE_API_KEYS=false LANGFLOW_REMOVE_API_KEYS= + +# Whether to use RedisCache or InMemoryCache +# Values: memory, redis +# Example: LANGFLOW_CACHE_TYPE=memory +# If you want to use redis then the following environment variables must be set: +# LANGFLOW_REDIS_HOST (default: localhost) +# LANGFLOW_REDIS_PORT (default: 6379) +# LANGFLOW_REDIS_DB (default: 0) +# LANGFLOW_REDIS_CACHE_EXPIRE (default: 3600) +LANGFLOW_CACHE_TYPE= + +# Superuser username +# Example: LANGFLOW_SUPERUSER=admin +LANGFLOW_SUPERUSER= + +# Superuser password +# Example: LANGFLOW_SUPERUSER_PASSWORD=123456 +LANGFLOW_SUPERUSER_PASSWORD= diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..d81146d19 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,44 @@ +name: "Async API tests" + +on: + push: + branches: + - dev + pull_request: + branches: + - dev + - main + +jobs: + build-and-test: + runs-on: ubuntu-latest + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Cache Docker layers + uses: actions/cache@v2 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-buildx-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-buildx- + + - name: Set up Docker + run: docker --version && docker-compose --version + + - name: "Create env file" + working-directory: ./deploy + run: | + echo "${{ secrets.ENV_FILE }}" > .env + + - name: Build and start services + + working-directory: ./deploy + run: docker compose up --exit-code-from tests tests result_backend broker celeryworker db --build + continue-on-error: true + + - name: Stop services + run: docker compose down diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index 
1b46e48b8..a013f686a 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -14,9 +14,7 @@ env: jobs: if_release: - if: | - ${{ github.event.pull_request.merged == true }} - && ${{ contains(github.event.pull_request.labels.*.name, 'pre-release') }} + if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'pre-release') }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -40,6 +38,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} draft: false generateReleaseNotes: true + prerelease: true tag: v${{ steps.check-version.outputs.version }} commit: main - name: Publish to PyPI diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2a258949e..908c5fc51 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,11 +45,3 @@ jobs: POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_API_TOKEN }} run: | poetry publish - - - name: Trigger build and push on langchain-serve - uses: peter-evans/repository-dispatch@v2 - with: - token: ${{ secrets.SERVE_GITHUB_TOKEN }} - repository: jina-ai/langchain-serve - event-type: langflow-push - client-payload: '{"push_token": "${{ secrets.LCSERVE_PUSH_TOKEN }}", "branch": "main"}' diff --git a/.github/workflows/test-lcserve-push.yml b/.github/workflows/test-lcserve-push.yml deleted file mode 100644 index f207c0a91..000000000 --- a/.github/workflows/test-lcserve-push.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: Trigger build and push on langchain-serve - -on: - workflow_dispatch: - - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Trigger build and push on langchain-serve - uses: peter-evans/repository-dispatch@v2 - with: - token: ${{ secrets.SERVE_GITHUB_TOKEN }} - repository: jina-ai/langchain-serve - event-type: langflow-push - client-payload: '{"push_token": "${{ secrets.LCSERVE_PUSH_TOKEN }}", "branch": "dev"}' diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 
e009534c2..bc2e193c6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -7,7 +7,7 @@ on: branches: [dev] env: - POETRY_VERSION: "1.4.0" + POETRY_VERSION: "1.5.0" jobs: build: @@ -16,6 +16,8 @@ jobs: matrix: python-version: - "3.10" + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} steps: - uses: actions/checkout@v3 - name: Install poetry diff --git a/.gitignore b/.gitignore index 3b6cfebbf..9b8ec1cc1 100644 --- a/.gitignore +++ b/.gitignore @@ -253,3 +253,5 @@ langflow.db .docusaurus/ /tmp/* +src/backend/langflow/frontend/ +.docker \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 000000000..25b480b27 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,48 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "tasks": [ + { + "label": "Init", + "type": "shell", + "command": "make init" + }, + // make backend + { + "label": "Backend", + "type": "shell", + "command": "make backend" + }, + // make frontend + { + "label": "Frontend", + "type": "shell", + "command": "make frontend" + }, + // make test + { + "label": "Test", + "type": "shell", + "command": "make tests" + }, + // make lint + { + "label": "Lint", + "type": "shell", + "command": "make lint" + }, + // make format + { + "label": "Format", + "type": "shell", + "command": "make format" + }, + // make install + { + "label": "Install", + "type": "shell", + "command": "make install_backend && make install_frontend" + } + ] +} diff --git a/Makefile b/Makefile index b1d42ff4e..1fbd7403c 100644 --- a/Makefile +++ b/Makefile @@ -19,6 +19,7 @@ coverage: --cov-report term-missing:skip-covered tests: + @make install_backend poetry run pytest tests format: @@ -27,19 +28,25 @@ format: cd src/frontend && npm run format lint: - poetry run mypy . + poetry run mypy src/backend/langflow poetry run black . --check poetry run ruff . 
--fix install_frontend: - cd src/frontend && npm install; + cd src/frontend && npm install install_frontendc: - cd src/frontend && rm -rf node_modules package-lock.json && npm install; + cd src/frontend && rm -rf node_modules package-lock.json && npm install run_frontend: cd src/frontend && npm start +run_cli: + poetry run langflow run --path src/frontend/build + +run_cli_debug: + poetry run langflow run --path src/frontend/build --log-level debug + setup_devcontainer: make init make build_frontend @@ -54,7 +61,7 @@ frontendc: make run_frontend install_backend: - poetry install + poetry install --extras deploy backend: make install_backend @@ -63,7 +70,7 @@ backend: build_and_run: echo 'Removing dist folder' rm -rf dist - make build && poetry run pip install dist/*.tar.gz && poetry run langflow + make build && poetry run pip install dist/*.tar.gz && poetry run langflow run build_and_install: echo 'Removing dist folder' @@ -80,17 +87,6 @@ build: poetry build --format sdist rm -rf src/backend/langflow/frontend -lcserve_push: - make build_frontend - @version=$$(poetry version --short); \ - lc-serve push --app langflow.lcserve:app --app-dir . \ - --image-name langflow --image-tag $${version} --verbose --public - -lcserve_deploy: - @:$(if $(uses),,$(error `uses` is not set. Please run `make uses=... lcserve_deploy`)) - lc-serve deploy jcloud --app langflow.lcserve:app --app-dir . 
\ - --uses $(uses) --config src/backend/langflow/jcloud.yml --verbose - dev: make install_frontend ifeq ($(build),1) diff --git a/README.md b/README.md index 9137ea714..538d53bef 100644 --- a/README.md +++ b/README.md @@ -36,8 +36,6 @@ - [Environment Variables](#environment-variables) - [Deployment](#deployment) - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform) - - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud) - - [API Usage](#api-usage) - [Deploy on Railway](#deploy-on-railway) - [Deploy on Render](#deploy-on-render) - [๐ŸŽจ Creating Flows](#-creating-flows) @@ -110,7 +108,6 @@ Each option is detailed below: - `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. - `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`. - `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`. -- `--jcloud/--no-jcloud`: Toggles the option to deploy on Jina AI Cloud. The default is `no-jcloud`. - `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`. - `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable. - `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`. 
@@ -134,118 +131,10 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google [![Open in Cloud Shell](https://gstatic.com/cloudssh/images/open-btn.svg)](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md) -## Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve) - -Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud. - -Start by installing `langchain-serve` with - -```bash -pip install langflow[deploy] -# or -pip install -U langchain-serve -``` - -Then, run: - -```bash -langflow --jcloud -``` - -```text -๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰ -๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://.wolf.jina.ai/ -๐Ÿ“– Read more about managing the server: https://github.com/jina-ai/langchain-serve -``` - -
- Show complete (example) output - -```text - ๐Ÿš€ Deploying Langflow server on Jina AI Cloud - โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ ๐ŸŽ‰ Flow is available! โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ - โ”‚ โ”‚ - โ”‚ ID langflow-e3dd8820ec โ”‚ - โ”‚ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ - โ”‚ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec โ”‚ - โ”‚ โ”‚ - โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ - โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ - โ”‚ App ID โ”‚ langflow-e3dd8820ec โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Phase โ”‚ Serving โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Endpoint โ”‚ wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ App logs 
โ”‚ dashboards.wolf.jina.ai โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Swagger UI โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/docs โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ OpenAPI JSON โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json โ”‚ - โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ - - ๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰ - ๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/ - ๐Ÿ“– Read more about managing the server: https://github.com/jina-ai/langchain-serve -``` - -
- -#### API Usage - -You can use Langflow directly on your browser, or use the API endpoints on Jina AI Cloud to interact with the server. - -
- Show API usage (with python) - -```python -import requests - -BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict" -FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468" -# You can tweak the flow by adding a tweaks dictionary -# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} -TWEAKS = { -"ChatOpenAI-g4jEr": {}, -"ConversationChain-UidfJ": {} -} - -def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict: - """ - Run a flow with a given message and optional tweaks. - - :param message: The message to send to the flow - :param flow_id: The ID of the flow to run - :param tweaks: Optional tweaks to customize the flow - :return: The JSON response from the flow - """ - api_url = f"{BASE_API_URL}/{flow_id}" - - payload = {"message": message} - - if tweaks: - payload["tweaks"] = tweaks - - response = requests.post(api_url, json=payload) - return response.json() - -# Setup any tweaks you want to apply to the flow -print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS)) -``` - -```json -{ - "result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. 
Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!" -} -``` - -
- -> Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository. ## Deploy on Railway -[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/Emy2sU?referralCode=MnPSdg) +[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/JMXEWp?referralCode=MnPSdg) ## Deploy on Render diff --git a/base.Dockerfile b/base.Dockerfile new file mode 100644 index 000000000..c76bbbcda --- /dev/null +++ b/base.Dockerfile @@ -0,0 +1,97 @@ + + +# syntax=docker/dockerfile:1 +# Keep this syntax directive! It's used to enable Docker BuildKit + +# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 +# but I try to keep it updated (see history) + +################################ +# PYTHON-BASE +# Sets up all our shared environment variables +################################ +FROM python:3.10-slim as python-base + +# python +ENV PYTHONUNBUFFERED=1 \ + # prevents python creating .pyc files + PYTHONDONTWRITEBYTECODE=1 \ + \ + # pip + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + \ + # poetry + # https://python-poetry.org/docs/configuration/#using-environment-variables + POETRY_VERSION=1.5.1 \ + # make poetry install to this location + POETRY_HOME="/opt/poetry" \ + # make poetry create the virtual environment in the project's root + # it gets named `.venv` + POETRY_VIRTUALENVS_IN_PROJECT=true \ + # do not ask any interactive question + POETRY_NO_INTERACTION=1 \ + \ + # paths + # this is where our requirements + virtual environment will live + PYSETUP_PATH="/opt/pysetup" \ + VENV_PATH="/opt/pysetup/.venv" + + +# prepend poetry and venv to path +ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" + + +################################ +# BUILDER-BASE +# Used to build deps + create our virtual environment +################################ +FROM python-base as 
builder-base +RUN apt-get update \ + && apt-get install --no-install-recommends -y \ + # deps for installing poetry + curl \ + # deps for building python deps + build-essential + + +# install poetry - respects $POETRY_VERSION & $POETRY_HOME +# The --mount will mount the buildx cache directory to where +# Poetry and Pip store their cache so that they can re-use it +RUN --mount=type=cache,target=/root/.cache \ + curl -sSL https://install.python-poetry.org | python3 - + +# copy project requirement files here to ensure they will be cached. +WORKDIR $PYSETUP_PATH +COPY poetry.lock pyproject.toml ./ +COPY ./src/backend/langflow/main.py ./src/backend/langflow/main.py +# Copy README.md to the build context +COPY README.md . +# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally +RUN --mount=type=cache,target=/root/.cache \ + poetry install --without dev --extras deploy + + +################################ +# DEVELOPMENT +# Image used during development / testing +################################ +FROM python-base as development +WORKDIR $PYSETUP_PATH + +# copy in our built poetry + venv +COPY --from=builder-base $POETRY_HOME $POETRY_HOME +COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH + +# Copy just one file to avoid rebuilding the whole image +COPY ./src/backend/langflow/__init__.py ./src/backend/langflow/__init__.py +# quicker install as runtime deps are already installed +RUN --mount=type=cache,target=/root/.cache \ + poetry install --with=dev --extras deploy + +# copy in our app code +COPY ./src/backend ./src/backend +RUN --mount=type=cache,target=/root/.cache \ + poetry install --with=dev --extras deploy +COPY ./tests ./tests + diff --git a/deploy/.env.example b/deploy/.env.example new file mode 100644 index 000000000..e67307406 --- /dev/null +++ b/deploy/.env.example @@ -0,0 +1,57 @@ +DOMAIN=localhost +STACK_NAME=langflow-stack +ENVIRONMENT=development + +TRAEFIK_PUBLIC_NETWORK=traefik-public +TRAEFIK_TAG=langflow-traefik
+TRAEFIK_PUBLIC_TAG=traefik-public + +# RabbitMQ configuration +RABBITMQ_DEFAULT_USER=langflow +RABBITMQ_DEFAULT_PASS=langflow + +# Database configuration +DB_USER=langflow +DB_PASSWORD=langflow +DB_HOST=db +DB_PORT=5432 +DB_NAME=langflow + +# Logging configuration +LOG_LEVEL=debug + +# DB configuration +POSTGRES_USER=langflow +POSTGRES_PASSWORD=langflow +POSTGRES_DB=langflow +POSTGRES_PORT=5432 + +# Cache configuration +LANGFLOW_CACHE_TYPE=redis +LANGFLOW_REDIS_HOST=result_backend +LANGFLOW_REDIS_PORT=6379 +LANGFLOW_REDIS_DB=0 +LANGFLOW_REDIS_EXPIRE=3600 +LANGFLOW_REDIS_PASSWORD= +FLOWER_UNAUTHENTICATED_API=True +BROKER_URL=amqp://langflow:langflow@broker:5672 +RESULT_BACKEND=redis://result_backend:6379/0 +C_FORCE_ROOT="true" + +# Frontend configuration +VITE_PROXY_TARGET=http://backend:7860/api/ +BACKEND_URL=http://backend:7860 + +# PGAdmin configuration +PGADMIN_DEFAULT_EMAIL=admin@admin.com +PGADMIN_DEFAULT_PASSWORD=admin + +# OpenAI configuration (for testing purposes) +OPENAI_API_KEY= + +# Superuser configuration +LANGFLOW_SUPERUSER=superuser +LANGFLOW_SUPERUSER_PASSWORD=superuser + +# New user configuration +LANGFLOW_NEW_USER_IS_ACTIVE=False \ No newline at end of file diff --git a/deploy/.gitignore b/deploy/.gitignore new file mode 100644 index 000000000..fdbcefec6 --- /dev/null +++ b/deploy/.gitignore @@ -0,0 +1 @@ +pgadmin \ No newline at end of file diff --git a/deploy/base.Dockerfile b/deploy/base.Dockerfile new file mode 100644 index 000000000..323663283 --- /dev/null +++ b/deploy/base.Dockerfile @@ -0,0 +1,92 @@ + + +# syntax=docker/dockerfile:1 +# Keep this syntax directive!
It's used to enable Docker BuildKit + +# Based on https://github.com/python-poetry/poetry/discussions/1879?sort=top#discussioncomment-216865 +# but I try to keep it updated (see history) + +################################ +# PYTHON-BASE +# Sets up all our shared environment variables +################################ +FROM python:3.10-slim as python-base + +# python +ENV PYTHONUNBUFFERED=1 \ + # prevents python creating .pyc files + PYTHONDONTWRITEBYTECODE=1 \ + \ + # pip + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + \ + # poetry + # https://python-poetry.org/docs/configuration/#using-environment-variables + POETRY_VERSION=1.5.1 \ + # make poetry install to this location + POETRY_HOME="/opt/poetry" \ + # make poetry create the virtual environment in the project's root + # it gets named `.venv` + POETRY_VIRTUALENVS_IN_PROJECT=true \ + # do not ask any interactive question + POETRY_NO_INTERACTION=1 \ + \ + # paths + # this is where our requirements + virtual environment will live + PYSETUP_PATH="/opt/pysetup" \ + VENV_PATH="/opt/pysetup/.venv" + + +# prepend poetry and venv to path +ENV PATH="$POETRY_HOME/bin:$VENV_PATH/bin:$PATH" + + +################################ +# BUILDER-BASE +# Used to build deps + create our virtual environment +################################ +FROM python-base as builder-base +RUN apt-get update \ + && apt-get install --no-install-recommends -y \ + # deps for installing poetry + curl \ + # deps for building python deps + build-essential + +# install poetry - respects $POETRY_VERSION & $POETRY_HOME +# The --mount will mount the buildx cache directory to where +# Poetry and Pip store their cache so that they can re-use it +RUN --mount=type=cache,target=/root/.cache \ + curl -sSL https://install.python-poetry.org | python3 - + +# copy project requirement files here to ensure they will be cached. 
+WORKDIR $PYSETUP_PATH +COPY ./poetry.lock ./pyproject.toml ./ +# Copy README.md to the build context +COPY ./README.md ./ +# install runtime deps - uses $POETRY_VIRTUALENVS_IN_PROJECT internally +RUN --mount=type=cache,target=/root/.cache \ + poetry install --without dev --extras deploy + + +################################ +# DEVELOPMENT +# Image used during development / testing +################################ +FROM python-base as development +WORKDIR $PYSETUP_PATH + +# copy in our built poetry + venv +COPY --from=builder-base $POETRY_HOME $POETRY_HOME +COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH + +# Copy just one file to avoid rebuilding the whole image +COPY ./src/backend/langflow/__init__.py ./src/backend/langflow/__init__.py +# quicker install as runtime deps are already installed +RUN --mount=type=cache,target=/root/.cache \ + poetry install --with=dev --extras deploy + +# copy in our app code +COPY ./src/backend ./src/backend +COPY ./tests ./tests diff --git a/deploy/docker-compose.override.yml b/deploy/docker-compose.override.yml new file mode 100644 index 000000000..1992916fb --- /dev/null +++ b/deploy/docker-compose.override.yml @@ -0,0 +1,67 @@ +version: "3.8" + +services: + proxy: + ports: + - "80:80" + - "8090:8080" + command: + # Enable Docker in Traefik, so that it reads labels from Docker services + - --providers.docker + # Add a constraint to only use services with the label for this stack + # from the env var TRAEFIK_TAG + - --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`) + # Do not expose all Docker services, only the ones explicitly exposed + - --providers.docker.exposedbydefault=false + # Disable Docker Swarm mode for local development + # - --providers.docker.swarmmode + # Enable the access log, with HTTP requests + - --accesslog + # Enable the Traefik log, for configurations and errors + - --log + # Enable the Dashboard and API + - --api + # Enable the Dashboard and API 
in insecure mode for local development + - --api.insecure=true + labels: + - traefik.enable=true + - traefik.http.routers.${STACK_NAME?Variable not set}-traefik-public-http.rule=Host(`${DOMAIN?Variable not set}`) + - traefik.http.services.${STACK_NAME?Variable not set}-traefik-public.loadbalancer.server.port=80 + + result_backend: + ports: + - "6379:6379" + + pgadmin: + ports: + - "5050:5050" + + flower: + ports: + - "5555:5555" + + backend: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`) + - traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860 + + frontend: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`) + - traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80 + + celeryworker: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-celeryworker-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`) + - traefik.http.services.${STACK_NAME?Variable not set}-celeryworker.loadbalancer.server.port=7860 + +networks: + traefik-public: + # For local dev, don't expect an external Traefik network + external: false diff --git a/deploy/docker-compose.with_tests.yml b/deploy/docker-compose.with_tests.yml new file mode 100644 index 000000000..c16c14197 --- /dev/null +++ b/deploy/docker-compose.with_tests.yml @@ -0,0 +1,277 @@ +version: "3.8" + +services: + proxy: + image: traefik:v3.0 + env_file: + - .env + networks: + - ${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - default + volumes: + - /var/run/docker.sock:/var/run/docker.sock + command: + # 
Enable Docker in Traefik, so that it reads labels from Docker services + - --providers.docker + # Add a constraint to only use services with the label for this stack + # from the env var TRAEFIK_TAG + - --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`) + # Do not expose all Docker services, only the ones explicitly exposed + - --providers.docker.exposedbydefault=false + # Enable the access log, with HTTP requests + - --accesslog + # Enable the Traefik log, for configurations and errors + - --log + # Enable the Dashboard and API + - --api + deploy: + placement: + constraints: + - node.role == manager + labels: + # Enable Traefik for this service, to make it available in the public network + - traefik.enable=true + # Use the traefik-public network (declared below) + - traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set} + # Use the custom label "traefik.constraint-label=traefik-public" + # This public Traefik will only use services with this label + - traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set} + # traefik-http set up only to use the middleware to redirect to https + - traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.scheme=https + - traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.permanent=true + # Handle host with and without "www" to redirect to only one of them + # Uses environment variable DOMAIN + # To disable www redirection remove the Host() you want to discard, here and + # below for HTTPS + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.entrypoints=http + # traefik-https the actual router using HTTPS + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.rule=Host(`${DOMAIN?Variable not set}`) || 
Host(`www.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.entrypoints=https + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls=true + # Use the "le" (Let's Encrypt) resolver created below + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls.certresolver=le + # Define the port inside of the Docker service to use + - traefik.http.services.${STACK_NAME?Variable not set}-proxy.loadbalancer.server.port=80 + # Handle domain with and without "www" to redirect to only one + # To disable www redirection remove the next line + - traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.regex=^https?://(www.)?(${DOMAIN?Variable not set})/(.*) + # Redirect a domain with www to non-www + # To disable it remove the next line + - traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.replacement=https://${DOMAIN?Variable not set}/$${3} + # Redirect a domain without www to www + # To enable it remove the previous line and uncomment the next + # - traefik.http.middlewares.${STACK_NAME}-www-redirect.redirectregex.replacement=https://www.${DOMAIN}/$${3} + # Middleware to redirect www, to disable it remove the next line + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.middlewares=${STACK_NAME?Variable not set}-www-redirect + # Middleware to redirect www, and redirect HTTP to HTTPS + # to disable www redirection remove the section: ${STACK_NAME?Variable not set}-www-redirect, + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect + + backend: &backend + image: "ogabrielluiz/langflow:latest" + build: + context: ../ + dockerfile: base.Dockerfile + depends_on: + - db + - broker + - result_backend + env_file: + - .env + volumes: + - ../:/app + - ./startup-backend.sh:/startup-backend.sh # Ensure the paths match + 
command: /startup-backend.sh # Fixed the path + healthcheck: + test: "exit 0" + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`) + - traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860 + + db: + image: postgres:15.4 + volumes: + - app-db-data:/var/lib/postgresql/data/pgdata + environment: + - PGDATA=/var/lib/postgresql/data/pgdata + deploy: + placement: + constraints: + - node.labels.app-db-data == true + healthcheck: + test: "exit 0" + env_file: + - .env + + pgadmin: + image: dpage/pgadmin4 + networks: + - ${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - default + volumes: + - pgadmin-data:/var/lib/pgadmin + env_file: + - .env + deploy: + labels: + - traefik.enable=true + - traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.rule=Host(`pgadmin.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.entrypoints=http + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.middlewares=${STACK_NAME?Variable not set}-https-redirect + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.rule=Host(`pgadmin.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.entrypoints=https + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls=true + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls.certresolver=le + - traefik.http.services.${STACK_NAME?Variable not set}-pgadmin.loadbalancer.server.port=5050 + + result_backend: + image: redis:6.2.5 + env_file: + - .env + # ports: + # - 6379:6379 + healthcheck: + test: "exit 0" + + celeryworker: + 
<<: *backend + env_file: + - .env + build: + context: ../ + dockerfile: base.Dockerfile + command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h + healthcheck: + test: "exit 0" + deploy: + replicas: 1 + + flower: + <<: *backend + env_file: + - .env + networks: + - default + build: + context: ../ + dockerfile: base.Dockerfile + environment: + - FLOWER_PORT=5555 + + command: /bin/sh -c "celery -A langflow.worker.celery_app --broker=${BROKER_URL?Variable not set} flower --port=5555" + deploy: + labels: + - traefik.enable=true + - traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.rule=Host(`flower.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.entrypoints=http + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.middlewares=${STACK_NAME?Variable not set}-https-redirect + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.rule=Host(`flower.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.entrypoints=https + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls=true + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls.certresolver=le + - traefik.http.services.${STACK_NAME?Variable not set}-flower.loadbalancer.server.port=5555 + + frontend: + image: "ogabrielluiz/langflow_frontend:latest" + env_file: + - .env + # user: your-non-root-user + build: + context: ../src/frontend + dockerfile: Dockerfile + args: + - BACKEND_URL=http://backend:7860 + restart: on-failure + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`) + - traefik.http.services.${STACK_NAME?Variable not 
set}-frontend.loadbalancer.server.port=80 + + broker: + # RabbitMQ management console + image: rabbitmq:3-management + environment: + - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin} + - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin} + volumes: + - rabbitmq_data:/etc/rabbitmq/ + - rabbitmq_data:/var/lib/rabbitmq/ + - rabbitmq_log:/var/log/rabbitmq/ + ports: + - 5672:5672 + - 15672:15672 + + prometheus: + image: prom/prometheus:v2.37.9 + env_file: + - .env + volumes: + - ./prometheus.yml:/etc/prometheus/prometheus.yml + command: + - "--config.file=/etc/prometheus/prometheus.yml" + # ports: + # - 9090:9090 + healthcheck: + test: "exit 0" + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-prometheus-http.rule=PathPrefix(`/metrics`) + - traefik.http.services.${STACK_NAME?Variable not set}-prometheus.loadbalancer.server.port=9090 + + grafana: + image: grafana/grafana:8.2.6 + env_file: + - .env + # ports: + # - 3000:3000 + volumes: + - grafana_data:/var/lib/grafana + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-grafana-http.rule=PathPrefix(`/grafana`) + - traefik.http.services.${STACK_NAME?Variable not set}-grafana.loadbalancer.server.port=3000 + + tests: + extends: + file: docker-compose.yml + service: backend + env_file: + - .env + build: + context: ../ + dockerfile: base.Dockerfile + command: pytest -vv + healthcheck: + test: "exit 0" + # override deploy labels to avoid conflicts with the backend service + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-tests-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`) + - traefik.http.services.${STACK_NAME?Variable not 
set}-tests.loadbalancer.server.port=7861 + +volumes: + grafana_data: + app-db-data: + rabbitmq_data: + rabbitmq_log: + pgadmin-data: + +networks: + traefik-public: + # Allow setting it to false for testing + external: false # ${TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL-true} diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml new file mode 100644 index 000000000..4eda0dc37 --- /dev/null +++ b/deploy/docker-compose.yml @@ -0,0 +1,258 @@ +version: "3.8" + +services: + proxy: + image: traefik:v3.0 + env_file: + - .env + networks: + - ${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - default + volumes: + - /var/run/docker.sock:/var/run/docker.sock + command: + # Enable Docker in Traefik, so that it reads labels from Docker services + - --providers.docker + # Add a constraint to only use services with the label for this stack + # from the env var TRAEFIK_TAG + - --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`) + # Do not expose all Docker services, only the ones explicitly exposed + - --providers.docker.exposedbydefault=false + # Enable the access log, with HTTP requests + - --accesslog + # Enable the Traefik log, for configurations and errors + - --log + # Enable the Dashboard and API + - --api + deploy: + placement: + constraints: + - node.role == manager + labels: + # Enable Traefik for this service, to make it available in the public network + - traefik.enable=true + # Use the traefik-public network (declared below) + - traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set} + # Use the custom label "traefik.constraint-label=traefik-public" + # This public Traefik will only use services with this label + - traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set} + # traefik-http set up only to use the middleware to redirect to https + - traefik.http.middlewares.${STACK_NAME?Variable not set}-https-redirect.redirectscheme.scheme=https + - traefik.http.middlewares.${STACK_NAME?Variable not 
set}-https-redirect.redirectscheme.permanent=true + # Handle host with and without "www" to redirect to only one of them + # Uses environment variable DOMAIN + # To disable www redirection remove the Host() you want to discard, here and + # below for HTTPS + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.entrypoints=http + # traefik-https the actual router using HTTPS + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.rule=Host(`${DOMAIN?Variable not set}`) || Host(`www.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.entrypoints=https + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls=true + # Use the "le" (Let's Encrypt) resolver created below + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-https.tls.certresolver=le + # Define the port inside of the Docker service to use + - traefik.http.services.${STACK_NAME?Variable not set}-proxy.loadbalancer.server.port=80 + # Handle domain with and without "www" to redirect to only one + # To disable www redirection remove the next line + - traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.regex=^https?://(www.)?(${DOMAIN?Variable not set})/(.*) + # Redirect a domain with www to non-www + # To disable it remove the next line + - traefik.http.middlewares.${STACK_NAME?Variable not set}-www-redirect.redirectregex.replacement=https://${DOMAIN?Variable not set}/$${3} + # Redirect a domain without www to www + # To enable it remove the previous line and uncomment the next + # - traefik.http.middlewares.${STACK_NAME}-www-redirect.redirectregex.replacement=https://www.${DOMAIN}/$${3} + # Middleware to redirect www, to disable it remove the next line + - traefik.http.routers.${STACK_NAME?Variable not 
set}-proxy-https.middlewares=${STACK_NAME?Variable not set}-www-redirect + # Middleware to redirect www, and redirect HTTP to HTTPS + # to disable www redirection remove the section: ${STACK_NAME?Variable not set}-www-redirect, + - traefik.http.routers.${STACK_NAME?Variable not set}-proxy-http.middlewares=${STACK_NAME?Variable not set}-www-redirect,${STACK_NAME?Variable not set}-https-redirect + + backend: &backend + image: "ogabrielluiz/langflow:latest" + build: + context: ../ + dockerfile: base.Dockerfile + depends_on: + - db + - broker + - result_backend + env_file: + - .env + volumes: + - ../:/app + - ./startup-backend.sh:/startup-backend.sh # Ensure the paths match + command: /startup-backend.sh # Fixed the path + healthcheck: + test: "exit 0" + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-backend-http.rule=PathPrefix(`/api/v1`) || PathPrefix(`/docs`) || PathPrefix(`/health`) + - traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=7860 + + db: + image: postgres:15.4 + volumes: + - app-db-data:/var/lib/postgresql/data/pgdata + environment: + - PGDATA=/var/lib/postgresql/data/pgdata + deploy: + placement: + constraints: + - node.labels.app-db-data == true + healthcheck: + test: "exit 0" + env_file: + - .env + + pgadmin: + image: dpage/pgadmin4 + networks: + - ${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - default + volumes: + - pgadmin-data:/var/lib/pgadmin + env_file: + - .env + deploy: + labels: + - traefik.enable=true + - traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.rule=Host(`pgadmin.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-http.entrypoints=http + - traefik.http.routers.${STACK_NAME?Variable not 
set}-pgadmin-http.middlewares=${STACK_NAME?Variable not set}-https-redirect + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.rule=Host(`pgadmin.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.entrypoints=https + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls=true + - traefik.http.routers.${STACK_NAME?Variable not set}-pgadmin-https.tls.certresolver=le + - traefik.http.services.${STACK_NAME?Variable not set}-pgadmin.loadbalancer.server.port=5050 + + result_backend: + image: redis:6.2.5 + env_file: + - .env + # ports: + # - 6379:6379 + healthcheck: + test: "exit 0" + + celeryworker: + <<: *backend + env_file: + - .env + build: + context: ../ + dockerfile: base.Dockerfile + command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h + healthcheck: + test: "exit 0" + deploy: + replicas: 1 + + flower: + <<: *backend + env_file: + - .env + networks: + - default + build: + context: ../ + dockerfile: base.Dockerfile + environment: + - FLOWER_PORT=5555 + + command: /bin/sh -c "celery -A langflow.worker.celery_app --broker=${BROKER_URL?Variable not set} flower --port=5555" + deploy: + labels: + - traefik.enable=true + - traefik.docker.network=${TRAEFIK_PUBLIC_NETWORK?Variable not set} + - traefik.constraint-label=${TRAEFIK_PUBLIC_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.rule=Host(`flower.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.entrypoints=http + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-http.middlewares=${STACK_NAME?Variable not set}-https-redirect + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.rule=Host(`flower.${DOMAIN?Variable not set}`) + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.entrypoints=https + - traefik.http.routers.${STACK_NAME?Variable not 
set}-flower-https.tls=true + - traefik.http.routers.${STACK_NAME?Variable not set}-flower-https.tls.certresolver=le + - traefik.http.services.${STACK_NAME?Variable not set}-flower.loadbalancer.server.port=5555 + + frontend: + image: "ogabrielluiz/langflow_frontend:latest" + env_file: + - .env + # user: your-non-root-user + build: + context: ../src/frontend + dockerfile: Dockerfile + args: + - BACKEND_URL=http://backend:7860 + restart: on-failure + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-frontend-http.rule=PathPrefix(`/`) + - traefik.http.services.${STACK_NAME?Variable not set}-frontend.loadbalancer.server.port=80 + + broker: + # RabbitMQ management console + image: rabbitmq:3-management + environment: + - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER:-admin} + - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS:-admin} + volumes: + - rabbitmq_data:/etc/rabbitmq/ + - rabbitmq_data:/var/lib/rabbitmq/ + - rabbitmq_log:/var/log/rabbitmq/ + ports: + - 5672:5672 + - 15672:15672 + + prometheus: + image: prom/prometheus:v2.37.9 + env_file: + - .env + volumes: + - ./prometheus.yml:/etc/prometheus/prometheus.yml + command: + - "--config.file=/etc/prometheus/prometheus.yml" + # ports: + # - 9090:9090 + healthcheck: + test: "exit 0" + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not set}-prometheus-http.rule=PathPrefix(`/metrics`) + - traefik.http.services.${STACK_NAME?Variable not set}-prometheus.loadbalancer.server.port=9090 + + grafana: + image: grafana/grafana:8.2.6 + env_file: + - .env + # ports: + # - 3000:3000 + volumes: + - grafana_data:/var/lib/grafana + deploy: + labels: + - traefik.enable=true + - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} + - traefik.http.routers.${STACK_NAME?Variable not 
set}-grafana-http.rule=PathPrefix(`/grafana`) + - traefik.http.services.${STACK_NAME?Variable not set}-grafana.loadbalancer.server.port=3000 + +volumes: + grafana_data: + app-db-data: + rabbitmq_data: + rabbitmq_log: + pgadmin-data: + +networks: + traefik-public: + # Allow setting it to false for testing + external: false # ${TRAEFIK_PUBLIC_NETWORK_IS_EXTERNAL-true} diff --git a/deploy/prometheus.yml b/deploy/prometheus.yml new file mode 100644 index 000000000..1d53d5b5f --- /dev/null +++ b/deploy/prometheus.yml @@ -0,0 +1,11 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + - job_name: prometheus + static_configs: + - targets: ["prometheus:9090"] + - job_name: flower + static_configs: + - targets: ["flower:5555"] diff --git a/deploy/startup-backend.sh b/deploy/startup-backend.sh new file mode 100755 index 000000000..75c8002a5 --- /dev/null +++ b/deploy/startup-backend.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +export LANGFLOW_DATABASE_URL="postgresql://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}" + + +# Your command to start the backend + +# If the ENVIRONMENT variable is set to "development", then start the backend in development mode +# else start the backend in production mode with guvicorn +if [ "$ENVIRONMENT" = "development" ]; then + echo "Starting backend in development mode" + exec python -m uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --log-level ${LOG_LEVEL:-info} --workers 2 --reload +else + echo "Starting backend in production mode" + exec langflow run --host 0.0.0.0 --port 7860 --log-level ${LOG_LEVEL:-info} --workers -1 --backend-only +fi + diff --git a/docker_example/README.md b/docker_example/README.md new file mode 100644 index 000000000..9e72dc645 --- /dev/null +++ b/docker_example/README.md @@ -0,0 +1,9 @@ +# LangFlow Docker Running + +```sh +git clone git@github.com:logspace-ai/langflow.git +cd langflow/docker_example +docker compose up +``` + +The web UI will be accessible on 
port [7860](http://localhost:7860/) \ No newline at end of file diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx index ffa747c1b..90282a73e 100644 --- a/docs/docs/components/custom.mdx +++ b/docs/docs/components/custom.mdx @@ -33,6 +33,7 @@ The CustomComponent class serves as the foundation for creating custom component | Supported Types | | --------------------------------------------------------- | | _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ | + | _`langflow.field_typing.NestedDict`_ | | _`langchain.chains.base.Chain`_ | | _`langchain.PromptTemplate`_ | | _`langchain.llms.base.BaseLLM`_ | @@ -44,6 +45,8 @@ The CustomComponent class serves as the foundation for creating custom component | _`langchain.embeddings.base.Embeddings`_ | | _`langchain.schema.BaseRetriever`_ | + The difference between _`dict`_ and _`langflow.field_typing.NestedDict`_ is that one adds a simple key-value pair field, while the other opens a more robust dictionary editor. + Unlike Langchain types, base Python types do not add a [handle](../guidelines/components) to the field by default. To add handles, diff --git a/docs/docs/components/text-splitters.mdx b/docs/docs/components/text-splitters.mdx index 6c91cc1a0..c6efe4553 100644 --- a/docs/docs/components/text-splitters.mdx +++ b/docs/docs/components/text-splitters.mdx @@ -1,11 +1,13 @@ -import Admonition from '@theme/Admonition'; +import Admonition from "@theme/Admonition"; # Text Splitters -

- We appreciate your understanding as we polish our documentation โ€“ it may contain some rough edges. Share your feedback or report issues to help us improve! ๐Ÿ› ๏ธ๐Ÿ“ -

+

+ We appreciate your understanding as we polish our documentation โ€“ it may + contain some rough edges. Share your feedback or report issues to help us + improve! ๐Ÿ› ๏ธ๐Ÿ“ +

A text splitter is a tool that divides a document or text into smaller chunks or segments. It is used to break down large texts into more manageable pieces for analysis or processing. @@ -22,13 +24,13 @@ The `CharacterTextSplitter` is used to split a long text into smaller chunks bas - **chunk_overlap:** Determines the number of characters that overlap between consecutive chunks when splitting text. It specifies how much of the previous chunk should be included in the next chunk. - For example, if the `chunk_overlap` is set to 20 and the `chunk_size` is set to 100, the splitter will create chunks of 100 characters each, but the last 20 characters of each chunk will overlap with the first 20 characters of the next chunk. This allows for a smoother transition between chunks and ensures that no information is lost โ€“ defaults to `200`. + For example, if the `chunk_overlap` is set to 20 and the `chunk_size` is set to 100, the splitter will create chunks of 100 characters each, but the last 20 characters of each chunk will overlap with the first 20 characters of the next chunk. This allows for a smoother transition between chunks and ensures that no information is lost โ€“ defaults to `200`. - **chunk_size:** Determines the maximum number of characters in each chunk when splitting a text. It specifies the size or length of each chunk. - For example, if the chunk_size is set to 100, the splitter will create chunks of 100 characters each. If the text is longer than 100 characters, it will be divided into multiple chunks of equal size, except for the last chunk, which may be smaller if there are remaining characters โ€“defaults to `1000`. + For example, if the chunk_size is set to 100, the splitter will create chunks of 100 characters each. If the text is longer than 100 characters, it will be divided into multiple chunks of equal size, except for the last chunk, which may be smaller if there are remaining characters โ€“defaults to `1000`. 
-- **separator:** Specifies the character that will be used to split the text into chunks โ€“ defaults to `.` +- **separator:** Specifies the character that will be used to split the text into chunks โ€“ defaults to `.` --- @@ -44,6 +46,18 @@ Theย `RecursiveCharacterTextSplitter`ย splits the text by trying to keep paragra - **chunk_size:** Determines the maximum number of characters in each chunk when splitting a text. It specifies the size or length of each chunk. -- **separator_type:** The parameter allows the user to split the code with multiple language support. It supports various languages such as Text, Ruby, Python, Solidity, Java, and more. Defaults to `Text`. +- **separators:** The `separators` in RecursiveCharacterTextSplitter are the characters used to split the text into chunks. The text splitter tries to create chunks based on splitting on the first character in the list of `separators`. If any chunks are too large, it moves on to the next character in the list and continues splitting. Defaults to ["\n\n", "\n", " ", ""]. -- **separators:** The `separators` in RecursiveCharacterTextSplitter are the characters used to split the text into chunks. The text splitter tries to create chunks based on splitting on the first character in the list of `separators`. If any chunks are too large, it moves on to the next character in the list and continues splitting. Defaults to `.` \ No newline at end of file +### LanguageRecursiveTextSplitter + +The `LanguageRecursiveTextSplitter` is a text splitter that splits the text into smaller chunks based on the (programming) language of the text. + +**Params** + +- **Documents:** Input documents to split. + +- **chunk_overlap:** Determines the number of characters that overlap between consecutive chunks when splitting text. It specifies how much of the previous chunk should be included in the next chunk. + +- **chunk_size:** Determines the maximum number of characters in each chunk when splitting a text. 
It specifies the size or length of each chunk. + +- **separator_type:** The parameter allows the user to split the code with multiple language support. It supports various languages such as Ruby, Python, Solidity, Java, and more. Defaults to `Python`. diff --git a/docs/docs/components/utilities.mdx b/docs/docs/components/utilities.mdx index f510990ce..593864213 100644 --- a/docs/docs/components/utilities.mdx +++ b/docs/docs/components/utilities.mdx @@ -1,10 +1,76 @@ -import Admonition from '@theme/Admonition'; +import Admonition from "@theme/Admonition"; # Utilities -

- We appreciate your understanding as we polish our documentation โ€“ it may contain some rough edges. Share your feedback or report issues to help us improve! ๐Ÿ› ๏ธ๐Ÿ“ -

+

+ We appreciate your understanding as we polish our documentation โ€“ it may + contain some rough edges. Share your feedback or report issues to help us + improve! ๐Ÿ› ๏ธ๐Ÿ“ +

+Utilities are a set of actions that can be used to perform common tasks in a flow. They are available in the **Utilities** section in the sidebar. + +--- + +### GET Request + +Make a GET request to the given URL. + +**Params** + +- **URL:** The URL to make the request to. There can be more than one URL, in which case the request will be made to each URL in order. +- **Headers:** A dictionary of headers to send with the request. + +**Output** + +- **List of Documents:** A list of Documents containing the JSON response from each request. + +--- + +### POST Request + +Make a POST request to the given URL. + +**Params** + +- **URL:** The URL to make the request to. +- **Headers:** A dictionary of headers to send with the request. +- **Document:** The Document containing a JSON object to send with the request. + +**Output** + +- **Document:** The JSON response from the request as a Document. + +--- + +### Update Request + +Make a PATCH or PUT request to the given URL. + +**Params** + +- **URL:** The URL to make the request to. +- **Headers:** A dictionary of headers to send with the request. +- **Document:** The Document containing a JSON object to send with the request. +- **Method:** The HTTP method to use for the request. Can be either `PATCH` or `PUT`. + +**Output** + +- **Document:** The JSON response from the request as a Document. + +--- + +### JSON Document Builder + +Build a Document containing a JSON object using a key and another Document page content. + +**Params** + +- **Key:** The key to use for the JSON object. +- **Document:** The Document page to use for the JSON object. + +**Output** + +- **List of Documents:** A list containing the Document with the JSON object. 
diff --git a/docs/docs/deployment/jina-deployment.md b/docs/docs/deployment/jina-deployment.md index bf9df051d..e69de29bb 100644 --- a/docs/docs/deployment/jina-deployment.md +++ b/docs/docs/deployment/jina-deployment.md @@ -1,101 +0,0 @@ -# Deploy on Jina AI Cloud - -Langflow integrates with langchain-serve to provide a one-command deployment to [Jina AI Cloud](https://github.com/jina-ai/langchain-serve). - -Start by installing `langchain-serve` with - -```bash -pip install -U langchain-serve -``` - -Then, run: - -```bash -langflow --jcloud -``` - -```text -๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰ -๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://.wolf.jina.ai/ -๐Ÿ“– Read more about managing the server: https://github.com/jina-ai/langchain-serve -``` - -**Complete (example) output:** - -```text - ๐Ÿš€ Deploying Langflow server on Jina AI Cloud - โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ ๐ŸŽ‰ Flow is available! 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ - โ”‚ โ”‚ - โ”‚ ID langflow-e3dd8820ec โ”‚ - โ”‚ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ - โ”‚ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec โ”‚ - โ”‚ โ”‚ - โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ - โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ - โ”‚ App ID โ”‚ langflow-e3dd8820ec โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Phase โ”‚ Serving โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Endpoint โ”‚ wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ App logs โ”‚ dashboards.wolf.jina.ai โ”‚ - 
โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Swagger UI โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/docs โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ OpenAPI JSON โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json โ”‚ - โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ - - ๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰ - ๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/ - ๐Ÿ“– Read more about managing the server: https://github.com/jina-ai/langchain-serve - ``` -## API Usage (with python) - -You can use Langflow directly on your browser or the API endpoints on Jina AI Cloud to interact with the server. - -```python -import requests - -BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict" -FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468" -# You can tweak the flow by adding a tweaks dictionary -# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} -TWEAKS = { -"ChatOpenAI-g4jEr": {}, -"ConversationChain-UidfJ": {} -} - -def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict: - """ - Run a flow with a given message and optional tweaks. 
- - :param message: The message to send to the flow - :param flow_id: The ID of the flow to run - :param tweaks: Optional tweaks to customize the flow - :return: The JSON response from the flow - """ - api_url = f"{BASE_API_URL}/{flow_id}" - - payload = {"message": message} - - if tweaks: - payload["tweaks"] = tweaks - - response = requests.post(api_url, json=payload) - return response.json() - -# Setup any tweaks you want to apply to the flow -print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS)) - ``` - - ```json -{ - "result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!" -} - ``` - -:::info - -Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository. 
- -::: \ No newline at end of file diff --git a/docs/docs/guidelines/api.mdx b/docs/docs/guidelines/api.mdx new file mode 100644 index 000000000..97d2db76e --- /dev/null +++ b/docs/docs/guidelines/api.mdx @@ -0,0 +1,147 @@ +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; + +# API Keys + +## Introduction + +Langflow offers an API Key functionality that allows users to access their individual components and flows without going through traditional login authentication. The API Key is a user-specific token that can be included in the request's header or query parameter to authenticate API calls. The following documentation outlines how to generate, use, and manage these API Keys in Langflow. + +## Generating an API Key + +### Through Langflow UI + +{/* add image img/api-key.png */} + + + +1. Click on the "API Key" icon. +2. Click on "Create new secret key". +3. Give it an optional name. +4. Click on "Create secret key". +5. Copy the API key and store it in a secure location. + +## Using the API Key + +### Using the `x-api-key` Header + +Include the `x-api-key` in the HTTP header when making API requests: + +```bash +curl -X POST \ + http://localhost:3000/api/v1/process/ \ + -H 'Content-Type: application/json'\ + -H 'x-api-key: '\ + -d '{"inputs": {"text":""}, "tweaks": {}}' +``` + +With Python using `requests`: + +```python +import requests +from typing import Optional + +BASE_API_URL = "http://localhost:3001/api/v1/process" +FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df" +# You can tweak the flow by adding a tweaks dictionary +# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} +TWEAKS = {} + +def run_flow(inputs: dict, + flow_id: str, + tweaks: Optional[dict] = None, + apiKey: Optional[str] = None) -> dict: + """ + Run a flow with a given message and optional tweaks. 
+ + :param message: The message to send to the flow + :param flow_id: The ID of the flow to run + :param tweaks: Optional tweaks to customize the flow + :return: The JSON response from the flow + """ + api_url = f"{BASE_API_URL}/{flow_id}" + + payload = {"inputs": inputs} + headers = {} + + if tweaks: + payload["tweaks"] = tweaks + if apiKey: + headers = {"x-api-key": apiKey} + + response = requests.post(api_url, json=payload, headers=headers) + return response.json() + +# Setup any tweaks you want to apply to the flow +inputs = {"text":""} +api_key = "" +print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key)) +``` + +### Using the Query Parameter + +Alternatively, you can include the API key as a query parameter in the URL: + +```bash +curl -X POST \ + http://localhost:3000/api/v1/process/?x-api-key= \ + -H 'Content-Type: application/json'\ + -d '{"inputs": {"text":""}, "tweaks": {}}' +``` + +Or with Python: + +```python +import requests + +BASE_API_URL = "http://localhost:3001/api/v1/process" +FLOW_ID = "4441b773-0724-434e-9cee-19d995d8f2df" +# You can tweak the flow by adding a tweaks dictionary +# e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} +TWEAKS = {} + +def run_flow(inputs: dict, + flow_id: str, + tweaks: Optional[dict] = None, + apiKey: Optional[str] = None) -> dict: + """ + Run a flow with a given message and optional tweaks. 
+ + :param message: The message to send to the flow + :param flow_id: The ID of the flow to run + :param tweaks: Optional tweaks to customize the flow + :return: The JSON response from the flow + """ + api_url = f"{BASE_API_URL}/{flow_id}" + + payload = {"inputs": inputs} + headers = {} + + if tweaks: + payload["tweaks"] = tweaks + if apiKey: + api_url += f"?x-api-key={apiKey}" + + response = requests.post(api_url, json=payload, headers=headers) + return response.json() + +# Setup any tweaks you want to apply to the flow +inputs = {"text":""} +api_key = "" +print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS, apiKey=api_key)) +``` + +## Security Considerations + +- **Visibility**: The API key won't be retrievable again through the UI for security reasons. +- **Scope**: The key only allows access to the flows and components of the specific user to whom it was issued. + +## Revoking an API Key + +To revoke an API key, simply delete it from the UI. This will immediately invalidate the key and prevent it from being used again. diff --git a/docs/docs/guidelines/async-api.mdx b/docs/docs/guidelines/async-api.mdx new file mode 100644 index 000000000..c5473812e --- /dev/null +++ b/docs/docs/guidelines/async-api.mdx @@ -0,0 +1,73 @@ +import Admonition from "@theme/Admonition"; + +# Asynchronous Processing + +## Introduction + +Starting from version 0.5, Langflow introduces a new feature to its API: the _`sync`_ flag. This flag allows users to opt for asynchronous processing of their flows, freeing up resources and enabling better control over long-running tasks. +This feature supports running tasks in a Celery worker queue and AnyIO task groups for now. + + + This is an experimental feature. The default behavior of the API is still + synchronous processing. The API may change in the future. + + +## The _`sync`_ Flag + +The _`sync`_ flag can be included in the payload of your POST request to the _`/api/v1/process/`_ endpoint. 
+When set to _`false`_, the API will initiate an asynchronous task instead of processing the flow synchronously. + +### API Request with _`sync`_ flag + +```bash +curl -X POST \ + http://localhost:3000/api/v1/process/ \ + -H 'Content-Type: application/json' \ + -H 'x-api-key: ' \ + -d '{"inputs": {"text": ""}, "tweaks": {}, "sync": false}' +``` + +Response: + +```json +{ + "result": { + "output": "..." + }, + "task": { + "id": "...", + "href": "api/v1/task/" + }, + "session_id": "...", + "backend": "..." // celery or anyio +} +``` + +## Checking Task Status + +You can check the status of an asynchronous task by making a GET request to the `/task/{task_id}` endpoint. + +```bash +curl -X GET \ + http://localhost:3000/api/v1/task/ \ + -H 'x-api-key: ' +``` + +### Response + +The endpoint will return the current status of the task and, if completed, the result of the task. Possible statuses include: + +- _`PENDING`_: The task is waiting for execution. +- _`SUCCESS`_: The task has completed successfully. +- _`FAILURE`_: The task has failed. + +Example response for a completed task: + +```json +{ + "status": "SUCCESS", + "result": { + "output": "..." + } +} +``` diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx index 816f8356b..4f18d0375 100644 --- a/docs/docs/guidelines/custom-component.mdx +++ b/docs/docs/guidelines/custom-component.mdx @@ -387,7 +387,7 @@ Your structure should look something like this: The recommended way to load custom components is to set the _`LANGFLOW_COMPONENTS_PATH`_ environment variable to the path of your custom components directory. Then, run the Langflow CLI as usual. 
```bash -export LANGFLOW_COMPONENTS_PATH=/path/to/components +export LANGFLOW_COMPONENTS_PATH='["/path/to/components"]' langflow ``` diff --git a/docs/docs/guidelines/login.mdx b/docs/docs/guidelines/login.mdx new file mode 100644 index 000000000..85ca1371f --- /dev/null +++ b/docs/docs/guidelines/login.mdx @@ -0,0 +1,128 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Sign up and Sign in + +## Introduction + +The login functionality in Langflow serves to authenticate users and protect sensitive routes in the application. Starting from version 0.5, Langflow introduces an enhanced login mechanism that is governed by a few environment variables. This allows new secure features. + +## Environment Variables + +The following environment variables are crucial in configuring the login settings: + +- _`LANGFLOW_AUTO_LOGIN`_: Determines whether Langflow should automatically log users in. Default is `True`. +- _`LANGFLOW_SUPERUSER`_: The username of the superuser. +- _`LANGFLOW_SUPERUSER_PASSWORD`_: The password for the superuser. +- _`LANGFLOW_SECRET_KEY`_: A key used for encrypting the superuser's password. +- _`LANGFLOW_NEW_USER_IS_ACTIVE`_: Determines whether new users are automatically activated. Default is `False`. + +All of these variables can be passed to the CLI command _`langflow run`_ through the _`--env-file`_ option. For example: + +```bash +langflow run --env-file .env +``` + + + It is critical not to expose these environment variables in your code + repository. Always set them securely in your deployment environment, for + example, using Docker secrets, Kubernetes ConfigMaps/Secrets, or dedicated + secure environment configuration systems like AWS Secrets Manager. + + +### _`LANGFLOW_AUTO_LOGIN`_ + +By default, this variable is set to `True`. 
When enabled (`True`), Langflow operates as it did in versions prior to 0.5โ€”automatic login without requiring explicit user authentication. + +To disable automatic login and enforce user authentication: + +```bash +export LANGFLOW_AUTO_LOGIN=False +``` + +### _`LANGFLOW_SUPERUSER`_ and _`LANGFLOW_SUPERUSER_PASSWORD`_ + +These environment variables are only relevant when `LANGFLOW_AUTO_LOGIN` is set to `False`. They specify the username and password for the superuser, which is essential for administrative tasks. + +To create a superuser manually: + +```bash +export LANGFLOW_SUPERUSER=admin +export LANGFLOW_SUPERUSER_PASSWORD=securepassword +``` + +You can also use the CLI command `langflow superuser` to set up a superuser interactively. + +### _`LANGFLOW_SECRET_KEY`_ + +This environment variable holds a secret key used for encrypting the superuser's password. Make sure to set this to a secure, randomly generated string. + +```bash +export LANGFLOW_SECRET_KEY=randomly_generated_secure_key +``` + +### _`LANGFLOW_NEW_USER_IS_ACTIVE`_ + +By default, this variable is set to `False`. When enabled (`True`), new users are automatically activated and can log in without requiring explicit activation by the superuser. + +## Command-Line Interface + +Langflow provides a command-line utility for managing superusers: + +```bash +langflow superuser +``` + +This command prompts you to enter the username and password for the superuser, unless they are already set using environment variables. + +## Sign-up + +With _`LANGFLOW_AUTO_LOGIN`_ set to _`False`_, Langflow requires users to sign up before they can log in. The sign-up page is the default landing page when a user visits Langflow for the first time. + + + +## Profile settings + +Users can change their profile settings by clicking on the profile icon in the top right corner of the application. 
This opens a dropdown menu with the following options: + +- **Admin Page**: Opens the admin page, which is only accessible to the superuser. +- **Profile Settings**: Opens the profile settings page. +- **Sign Out**: Logs the user out. + + + +By clicking on **Profile Settings**, the user is taken to the profile settings page, where they can change their password and their profile picture. + + + +By clicking on **Admin Page**, the superuser is taken to the admin page, where they can manage users and groups. + + diff --git a/docs/docs/guides/async-tasks.mdx b/docs/docs/guides/async-tasks.mdx new file mode 100644 index 000000000..e865fe4b6 --- /dev/null +++ b/docs/docs/guides/async-tasks.mdx @@ -0,0 +1,44 @@ +import Admonition from "@theme/Admonition"; + +# Async API + +## Introduction + + + This implementation is still in development. Contributions are welcome! + + +The Async API is an implementation of the Langflow API that uses [Celery](https://docs.celeryproject.org/en/stable/) +to run the tasks asynchronously, using a message broker to send and receive messages, a result backend to store the results and a cache to store the task states and session data. + +### Configuration + +The folder _`./deploy`_ in the [Github repository](https://github.com/logspace-ai/langflow) contains a _`.env.example`_ file that can be used to configure a Langflow deployment. +The file contains the variables required to configure a Celery worker queue, Redis cache and result backend and a RabbitMQ message broker. 
+ +To set it up locally you can copy the file to _`.env`_ and run the following command: + +```bash +docker compose up -d +``` + +This will set up the following containers: + +- Langflow API +- Celery worker +- RabbitMQ message broker +- Redis cache +- PostgreSQL database +- PGAdmin +- Flower +- Traefik +- Grafana +- Prometheus + +### Testing + +To run the tests for the Async API, you can run the following command: + +```bash +docker compose -f docker-compose.with_tests.yml up --exit-code-from tests tests result_backend broker celeryworker db --build +``` diff --git a/docs/docs/guides/langfuse_integration.mdx b/docs/docs/guides/langfuse_integration.mdx new file mode 100644 index 000000000..81f06e787 --- /dev/null +++ b/docs/docs/guides/langfuse_integration.mdx @@ -0,0 +1,49 @@ +# Integrating Langfuse with Langflow + +## Introduction + +Langfuse is an open-source tracing and analytics tool designed for LLM applications. Integrating Langfuse with Langflow provides detailed production traces and granular insights into quality, cost, and latency. This integration allows you to monitor and debug your Langflow's chat or APIs easily. + +## Step-by-Step Instructions + +### Step 1: Create a Langfuse account + +1. Go to [Langfuse](https://langfuse.com) and click on the "Sign In" button in the top right corner. +2. Click on the "Sign Up" button and create an account. +3. Once logged in, click on "Settings" and then on "Create new API keys." +4. Copy the Public key and the Secret Key and save them somewhere safe. + {/* Add these keys to your environment variables in the following step. */} + +### Step 2: Set up Langfuse in Langflow + +1. **Export the Environment Variables**: You'll need to export the environment variables `LANGFLOW_LANGFUSE_SECRET_KEY` and `LANGFLOW_LANGFUSE_PUBLIC_KEY` with the values obtained in Step 1. 
+ + You can do this by executing the following commands in your terminal: + + ```bash + export LANGFLOW_LANGFUSE_SECRET_KEY= + export LANGFLOW_LANGFUSE_PUBLIC_KEY= + ``` + + Alternatively, you can run the Langflow CLI command: + + ```bash + LANGFLOW_LANGFUSE_SECRET_KEY= LANGFLOW_LANGFUSE_PUBLIC_KEY= langflow + ``` + + If you are self-hosting Langfuse, you can also set the environment variable `LANGFLOW_LANGFUSE_HOST` to point to your Langfuse instance. By default, Langfuse points to the cloud instance at `https://cloud.langfuse.com`. + +2. **Verify Integration**: Ensure that the environment variables are set correctly by checking their existence in your environment, for example by running: + + ```bash + echo $LANGFLOW_LANGFUSE_SECRET_KEY + echo $LANGFLOW_LANGFUSE_PUBLIC_KEY + ``` + +3. **Monitor Langflow**: Now, whenever you use Langflow's chat or API, you will be able to see the tracing of your conversations in Langfuse. + +That's it! You have successfully integrated Langfuse with Langflow, enhancing observability and debugging capabilities for your LLM application. + +--- + +Note: For more details or customized configurations, please refer to the official [Langfuse documentation](https://langfuse.com/docs/integrations/langchain). diff --git a/docs/docs/guides/superuser.mdx b/docs/docs/guides/superuser.mdx new file mode 100644 index 000000000..04e0f96af --- /dev/null +++ b/docs/docs/guides/superuser.mdx @@ -0,0 +1,7 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; + +Now, we need to explain what are the permissions the superuser gets. 
Once logged in, they can activate new users, +edit them, diff --git a/docs/package-lock.json b/docs/package-lock.json index ed79230c6..c0c8a73fd 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -16,7 +16,7 @@ "@docusaurus/theme-classic": "^2.4.1", "@docusaurus/theme-search-algolia": "^2.4.1", "@mdx-js/react": "^2.3.0", - "@mendable/search": "^0.0.114", + "@mendable/search": "^0.0.154", "@pbe/react-yandex-maps": "^1.2.4", "@prismicio/client": "^7.0.1", "@uiball/loaders": "^1.2.6", @@ -3250,10 +3250,11 @@ } }, "node_modules/@mendable/search": { - "version": "0.0.114", - "resolved": "https://registry.npmjs.org/@mendable/search/-/search-0.0.114.tgz", - "integrity": "sha512-0uR+zxONuu/16bpLli49Jocr5fee1WIjs06KzU1AnHsR+fdFBmfrlpgTDWctgGuXPzS5Dorlw4VMlR5dPW5qVQ==", + "version": "0.0.154", + "resolved": "https://registry.npmjs.org/@mendable/search/-/search-0.0.154.tgz", + "integrity": "sha512-adNwXlIaMXVMCkPU2uUdghfn05Dmxb0BnE95SRLQJ6evHajsNFQdRl5Ltj3WijG+qo4ozTIJcPOBYrDPKMTPVw==", "dependencies": { + "html-react-parser": "^4.2.0", "posthog-js": "^1.45.1" }, "peerDependencies": { @@ -9351,6 +9352,33 @@ "safe-buffer": "~5.1.0" } }, + "node_modules/html-dom-parser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/html-dom-parser/-/html-dom-parser-4.0.0.tgz", + "integrity": "sha512-TUa3wIwi80f5NF8CVWzkopBVqVAtlawUzJoLwVLHns0XSJGynss4jiY0mTWpiDOsuyw+afP+ujjMgRh9CoZcXw==", + "dependencies": { + "domhandler": "5.0.3", + "htmlparser2": "9.0.0" + } + }, + "node_modules/html-dom-parser/node_modules/htmlparser2": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-9.0.0.tgz", + "integrity": "sha512-uxbSI98wmFT/G4P2zXx4OVx04qWUmyFPrD2/CNepa2Zo3GPNaCaaxElDgwUrwYWkK1nr9fft0Ya8dws8coDLLQ==", + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + 
"domutils": "^3.1.0", + "entities": "^4.5.0" + } + }, "node_modules/html-entities": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-2.4.0.tgz", @@ -9394,6 +9422,20 @@ "node": ">= 12" } }, + "node_modules/html-react-parser": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/html-react-parser/-/html-react-parser-4.2.1.tgz", + "integrity": "sha512-Dxzdowj5Zu/+7mr8X8PzCFbPXGuwCwGB2u4cB6oxZGES9inw85qlvnlfPD75VGKUGjcgsXs+9Dpj+THWNQyOBw==", + "dependencies": { + "domhandler": "5.0.3", + "html-dom-parser": "4.0.0", + "react-property": "2.0.0", + "style-to-js": "1.1.3" + }, + "peerDependencies": { + "react": "0.14 || 15 || 16 || 17 || 18" + } + }, "node_modules/html-tags": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.3.1.tgz", @@ -15324,6 +15366,11 @@ "react": ">=16.6.0" } }, + "node_modules/react-property": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/react-property/-/react-property-2.0.0.tgz", + "integrity": "sha512-kzmNjIgU32mO4mmH5+iUyrqlpFQhF8K2k7eZ4fdLSOPFrD1XgEuSBv9LDEgxRXTMBqMd8ppT0x6TIzqE5pdGdw==" + }, "node_modules/react-router": { "version": "5.3.4", "resolved": "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz", @@ -17510,6 +17557,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/style-to-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.3.tgz", + "integrity": "sha512-zKI5gN/zb7LS/Vm0eUwjmjrXWw8IMtyA8aPBJZdYiQTXj4+wQ3IucOLIOnF7zCHxvW8UhIGh/uZh/t9zEHXNTQ==", + "dependencies": { + "style-to-object": "0.4.1" + } + }, + "node_modules/style-to-js/node_modules/style-to-object": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.1.tgz", + "integrity": "sha512-HFpbb5gr2ypci7Qw+IOhnP2zOU7e77b+rzM+wTzXzfi1PrtBCX0E7Pk4wL4iTLnhzZ+JgEGAhX81ebTg/aYjQw==", + "dependencies": { + "inline-style-parser": "0.1.1" + } 
+ }, "node_modules/style-to-object": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz", diff --git a/docs/package.json b/docs/package.json index 856e66ebe..277f959a8 100644 --- a/docs/package.json +++ b/docs/package.json @@ -22,7 +22,7 @@ "@docusaurus/theme-classic": "^2.4.1", "@docusaurus/theme-search-algolia": "^2.4.1", "@mdx-js/react": "^2.3.0", - "@mendable/search": "^0.0.114", + "@mendable/search": "^0.0.154", "@pbe/react-yandex-maps": "^1.2.4", "@prismicio/client": "^7.0.1", "@uiball/loaders": "^1.2.6", @@ -69,4 +69,4 @@ "engines": { "node": ">=16.14" } -} +} \ No newline at end of file diff --git a/docs/sidebars.js b/docs/sidebars.js index fbabab150..f500d7728 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -16,6 +16,9 @@ module.exports = { label: "Guidelines", collapsed: false, items: [ + "guidelines/login", + "guidelines/api", + "guidelines/async-api", "guidelines/components", "guidelines/features", "guidelines/collection", @@ -42,6 +45,7 @@ module.exports = { "components/text-splitters", "components/toolkits", "components/tools", + "components/utilities", "components/vector-stores", "components/wrappers", ], @@ -50,7 +54,12 @@ module.exports = { type: "category", label: "Step-by-Step Guides", collapsed: false, - items: ["guides/loading_document", "guides/chatprompttemplate_guide"], + items: [ + "guides/async-tasks", + "guides/loading_document", + "guides/chatprompttemplate_guide", + "guides/langfuse_integration", + ], }, // { // type: 'category', @@ -82,7 +91,7 @@ module.exports = { type: "category", label: "Deployment", collapsed: false, - items: ["deployment/gcp-deployment", "deployment/jina-deployment"], + items: ["deployment/gcp-deployment"], }, { type: "category", diff --git a/docs/src/theme/Footer.js b/docs/src/theme/Footer.js index 403eb4382..d2a56bf5a 100644 --- a/docs/src/theme/Footer.js +++ b/docs/src/theme/Footer.js @@ -37,7 +37,7 @@ export default function FooterWrapper(props) { 
const mendableFloatingButton = React.createElement(MendableFloatingButton, { floatingButtonStyle: { color: "#000000", backgroundColor: "#f6f6f6" }, - anon_key: customFields.mendableAnonKey, // Mendable Search Public ANON key, ok to be public + anon_key: 'b7f52734-297c-41dc-8737-edbd13196394', // Mendable Search Public ANON key, ok to be public showSimpleSearch: true, icon: icon, }); diff --git a/docs/static/data/organizations-100.csv b/docs/static/data/organizations-100.csv index 93dcac9f3..a111992d7 100644 --- a/docs/static/data/organizations-100.csv +++ b/docs/static/data/organizations-100.csv @@ -1,101 +1,101 @@ -Index,Organization Id,Name,Website,Country,Description,Founded,Industry,Number of employees -1,FAB0d41d5b5d22c,Ferrell LLC,https://price.net/,Papua New Guinea,Horizontal empowering knowledgebase,1990,Plastics,3498 -2,6A7EdDEA9FaDC52,"Mckinney, Riley and Day",http://www.hall-buchanan.info/,Finland,User-centric system-worthy leverage,2015,Glass / Ceramics / Concrete,4952 -3,0bFED1ADAE4bcC1,Hester Ltd,http://sullivan-reed.com/,China,Switchable scalable moratorium,1971,Public Safety,5287 -4,2bFC1Be8a4ce42f,Holder-Sellers,https://becker.com/,Turkmenistan,De-engineered systemic artificial intelligence,2004,Automotive,921 -5,9eE8A6a4Eb96C24,Mayer Group,http://www.brewer.com/,Mauritius,Synchronized needs-based challenge,1991,Transportation,7870 -6,cC757116fe1C085,Henry-Thompson,http://morse.net/,Bahamas,Face-to-face well-modulated customer loyalty,1992,Primary / Secondary Education,4914 -7,219233e8aFF1BC3,Hansen-Everett,https://www.kidd.org/,Pakistan,Seamless disintermediate collaboration,2018,Publishing Industry,7832 -8,ccc93DCF81a31CD,Mcintosh-Mora,https://www.brooks.com/,Heard Island and McDonald Islands,Centralized attitude-oriented capability,1970,Import / Export,4389 -9,0B4F93aA06ED03e,Carr Inc,http://ross.com/,Kuwait,Distributed impactful customer loyalty,1996,Plastics,8167 -10,738b5aDe6B1C6A5,Gaines 
Inc,http://sandoval-hooper.com/,Uzbekistan,Multi-lateral scalable protocol,1997,Outsourcing / Offshoring,9698 -11,AE61b8Ffebbc476,Kidd Group,http://www.lyons.com/,Bouvet Island (Bouvetoya),Proactive foreground paradigm,2001,Primary / Secondary Education,7473 -12,eb3B7D06cCdD609,Crane-Clarke,https://www.sandoval.com/,Denmark,Front-line clear-thinking encryption,2014,Food / Beverages,9011 -13,8D0c29189C9798B,"Keller, Campos and Black",https://www.garner.info/,Liberia,Ameliorated directional emulation,2020,Museums / Institutions,2862 -14,D2c91cc03CA394c,Glover-Pope,http://www.silva.biz/,United Arab Emirates,Persevering contextually-based approach,2013,Medical Practice,9079 -15,C8AC1eaf9C036F4,Pacheco-Spears,https://aguilar.com/,Sweden,Secured logistical synergy,1984,Maritime,769 -16,b5D10A14f7a8AfE,Hodge-Ayers,http://www.archer-elliott.com/,Honduras,Future-proofed radical implementation,1990,Facilities Services,8508 -17,68139b5C4De03B4,"Bowers, Guerra and Krause",http://www.carrillo-nicholson.com/,Uganda,De-engineered transitional strategy,1972,Primary / Secondary Education,6986 -18,5c2EffEfdba2BdF,Mckenzie-Melton,http://montoya-thompson.com/,Hong Kong,Reverse-engineered heuristic alliance,1998,Investment Management / Hedge Fund / Private Equity,4589 -19,ba179F19F7925f5,Branch-Mann,http://www.lozano.com/,Botswana,Adaptive intangible frame,1999,Architecture / Planning,7961 -20,c1Ce9B350BAc66b,Weiss and Sons,https://barrett.com/,Korea,Sharable optimal functionalities,2011,Plastics,5984 -21,8de40AC4e6EaCa4,"Velez, Payne and Coffey",http://burton.com/,Luxembourg,Mandatory coherent synergy,1986,Wholesale,5010 -22,Aad86a4F0385F2d,Harrell LLC,http://www.frey-rosario.com/,Guadeloupe,Reverse-engineered mission-critical moratorium,2018,Construction,2185 -23,22aC3FFd64fD703,"Eaton, Reynolds and Vargas",http://www.freeman.biz/,Monaco,Self-enabling multi-tasking process improvement,2014,Luxury Goods / Jewelry,8987 
-24,5Ec4C272bCf085c,Robbins-Cummings,http://donaldson-wilkins.com/,Belgium,Organic non-volatile hierarchy,1991,Pharmaceuticals,5038 -25,5fDBeA8BB91a000,Jenkins Inc,http://www.kirk.biz/,South Africa,Front-line systematic help-desk,2002,Insurance,1215 -26,dFfD6a6F9AC2d9C,"Greene, Benjamin and Novak",http://www.kent.net/,Romania,Centralized leadingedge moratorium,2012,Museums / Institutions,4941 -27,4B217cC5a0674C5,"Dickson, Richmond and Clay",http://everett.com/,Czech Republic,Team-oriented tangible complexity,1980,Real Estate / Mortgage,3122 -28,88b1f1cDcf59a37,Prince-David,http://thompson.com/,Christmas Island,Virtual holistic methodology,1970,Banking / Mortgage,1046 -29,f9F7bBCAEeC360F,Ayala LLC,http://www.zhang.com/,Philippines,Open-source zero administration hierarchy,2021,Legal Services,7664 -30,7Cb3AeFcE4Ba31e,Rivas Group,https://hebert.org/,Australia,Open-architected well-modulated capacity,1998,Logistics / Procurement,4155 -31,ccBcC32adcbc530,"Sloan, Mays and Whitehead",http://lawson.com/,Chad,Face-to-face high-level conglomeration,1997,Civil Engineering,365 -32,f5afd686b3d05F5,"Durham, Allen and Barnes",http://chan-stafford.org/,Zimbabwe,Synergistic web-enabled framework,1993,Mechanical or Industrial Engineering,6135 -33,38C6cfC5074Fa5e,Fritz-Franklin,http://www.lambert.com/,Nepal,Automated 4thgeneration website,1972,Hospitality,4516 -34,5Cd7efccCcba38f,Burch-Ewing,http://cline.net/,Taiwan,User-centric 4thgeneration system engine,1981,Venture Capital / VC,7443 -35,9E6Acb51e3F9d6F,"Glass, Barrera and Turner",https://dunlap.com/,Kyrgyz Republic,Multi-channeled 3rdgeneration open system,2020,Utilities,2610 -36,4D4d7E18321eaeC,Pineda-Cox,http://aguilar.org/,Bolivia,Fundamental asynchronous capability,2010,Human Resources / HR,1312 -37,485f5d06B938F2b,"Baker, Mccann and Macdonald",http://www.anderson-barker.com/,Kenya,Cross-group user-facing focus group,2013,Legislative Office,1638 -38,19E3a5Bf6dBDc4F,Cuevas-Moss,https://dodson-castaneda.net/,Guatemala,Extended 
human-resource intranet,1994,Music,9995 -39,6883A965c7b68F7,Hahn PLC,http://newman.com/,Belarus,Organic logistical leverage,2012,Electrical / Electronic Manufacturing,3715 -40,AC5B7AA74Aa4A2E,"Valentine, Ferguson and Kramer",http://stuart.net/,Jersey,Centralized secondary time-frame,1997,Non - Profit / Volunteering,3585 -41,decab0D5027CA6a,Arroyo Inc,https://www.turner.com/,Grenada,Managed demand-driven website,2006,Writing / Editing,9067 -42,dF084FbBb613eea,Walls LLC,http://www.reese-vasquez.biz/,Cape Verde,Self-enabling fresh-thinking installation,1989,Investment Management / Hedge Fund / Private Equity,1678 -43,A2D89Ab9bCcAd4e,"Mitchell, Warren and Schneider",https://fox.biz/,Trinidad and Tobago,Enhanced intangible time-frame,2021,Capital Markets / Hedge Fund / Private Equity,3816 -44,77aDc905434a49f,Prince PLC,https://www.watts.com/,Sweden,Profit-focused coherent installation,2016,Individual / Family Services,7645 -45,235fdEFE2cfDa5F,Brock-Blackwell,http://www.small.com/,Benin,Secured foreground emulation,1986,Online Publishing,7034 -46,1eD64cFe986BBbE,Walton-Barnett,https://ashley-schaefer.com/,Western Sahara,Right-sized clear-thinking flexibility,2001,Luxury Goods / Jewelry,1746 -47,CbBbFcdd0eaE2cF,Bartlett-Arroyo,https://cruz.com/,Northern Mariana Islands,Realigned didactic function,1976,Civic / Social Organization,3987 -48,49aECbDaE6aBD53,"Wallace, Madden and Morris",http://www.blevins-fernandez.biz/,Germany,Persistent real-time customer loyalty,2016,Pharmaceuticals,9443 -49,7b3fe6e7E72bFa4,Berg-Sparks,https://cisneros-love.com/,Canada,Stand-alone static implementation,1974,Arts / Crafts,2073 -50,c6DedA82A8aef7E,Gonzales Ltd,http://bird.com/,Tonga,Managed human-resource policy,1988,Consumer Goods,9069 -51,7D9FBF85cdC3871,Lawson and Sons,https://www.wong.com/,French Southern Territories,Compatible analyzing intranet,2021,Arts / Crafts,3527 -52,7dd18Fb7cB07b65,"Mcguire, Mcconnell and Olsen",https://melton-briggs.com/,Korea,Profound client-server 
frame,1988,Printing,8445 -53,EF5B55FadccB8Fe,Charles-Phillips,https://bowman.com/,Cote d'Ivoire,Monitored client-server implementation,2012,Mental Health Care,3450 -54,f8D4B99e11fAF5D,Odom Ltd,https://www.humphrey-hess.com/,Cote d'Ivoire,Advanced static process improvement,2012,Management Consulting,1825 -55,e24D21BFd3bF1E5,Richard PLC,https://holden-coleman.net/,Mayotte,Object-based optimizing model,1971,Broadcast Media,4942 -56,B9BdfEB6D3Ca44E,Sampson Ltd,https://blevins.com/,Cayman Islands,Intuitive local adapter,2005,Farming,1418 -57,2a74D6f3D3B268e,"Cherry, Le and Callahan",https://waller-delacruz.biz/,Nigeria,Universal human-resource collaboration,2017,Entertainment / Movie Production,7202 -58,Bf3F3f62c8aBC33,Cherry PLC,https://www.avila.info/,Marshall Islands,Persistent tertiary website,1980,Plastics,8245 -59,aeBe26B80a7a23c,Melton-Nichols,https://kennedy.com/,Palau,User-friendly clear-thinking productivity,2021,Legislative Office,8741 -60,aAeb29ad43886C6,Potter-Walsh,http://thomas-french.org/,Turkey,Optional non-volatile open system,2008,Human Resources / HR,6923 -61,bD1bc6bB6d1FeD3,Freeman-Chen,https://mathis.com/,Timor-Leste,Phased next generation adapter,1973,International Trade / Development,346 -62,EB9f456e8b7022a,Soto Group,https://norris.info/,Vietnam,Enterprise-wide executive installation,1988,Business Supplies / Equipment,9097 -63,Dfef38C51D8DAe3,"Poole, Cruz and Whitney",https://reed.info/,Reunion,Balanced analyzing groupware,1978,Marketing / Advertising / Sales,2992 -64,055ffEfB2Dd95B0,Riley Ltd,http://wiley.com/,Brazil,Optional exuding superstructure,1986,Textiles,9315 -65,cBfe4dbAE1699da,"Erickson, Andrews and Bailey",https://www.hobbs-grant.com/,Eritrea,Vision-oriented secondary project,2014,Consumer Electronics,7829 -66,fdFbecbadcdCdf1,"Wilkinson, Charles and Arroyo",http://hunter-mcfarland.com/,United States Virgin Islands,Assimilated 24/7 archive,1996,Building Materials,602 -67,5DCb8A5a5ca03c0,Floyd Ltd,http://www.whitney.com/,Falkland 
Islands (Malvinas),Function-based fault-tolerant concept,2017,Public Relations / PR,2911 -68,ce57DCbcFD6d618,Newman-Galloway,https://www.scott.com/,Luxembourg,Enhanced foreground collaboration,1987,Information Technology / IT,3934 -69,5aaD187dc929371,Frazier-Butler,https://www.daugherty-farley.info/,Northern Mariana Islands,Persistent interactive circuit,1972,Outsourcing / Offshoring,5130 -70,902D7Ac8b6d476b,Newton Inc,https://www.richmond-manning.info/,Netherlands Antilles,Fundamental stable info-mediaries,1976,Military Industry,563 -71,32BB9Ff4d939788,Duffy-Levy,https://www.potter.com/,Guernsey,Diverse exuding installation,1982,Wireless,6146 -72,adcB0afbE58bAe3,Wagner LLC,https://decker-esparza.com/,Uruguay,Reactive attitude-oriented toolset,1987,International Affairs,6874 -73,dfcA1c84AdB61Ac,Mccall-Holmes,http://www.dean.com/,Benin,Object-based value-added database,2009,Legal Services,696 -74,208044AC2fe52F3,Massey LLC,https://frazier.biz/,Suriname,Configurable zero administration Graphical User Interface,1986,Accounting,5004 -75,f3C365f0c1A0623,Hicks LLC,http://alvarez.biz/,Pakistan,Quality-focused client-server Graphical User Interface,1970,Computer Software / Engineering,8480 -76,ec5Bdd3CBAfaB93,"Cole, Russell and Avery",http://www.blankenship.com/,Mongolia,De-engineered fault-tolerant challenge,2000,Law Enforcement,7012 -77,DDB19Be7eeB56B4,Cummings-Rojas,https://simon-pearson.com/,Svalbard & Jan Mayen Islands,User-centric modular customer loyalty,2012,Financial Services,7529 -78,dd6CA3d0bc3cAfc,"Beasley, Greene and Mahoney",http://www.petersen-lawrence.com/,Togo,Extended content-based methodology,1976,Religious Institutions,869 -79,A0B9d56e61070e3,"Beasley, Sims and Allison",http://burke.info/,Latvia,Secured zero tolerance hub,1972,Facilities Services,6182 -80,cBa7EFe5D05Adaf,Crawford-Rivera,https://black-ramirez.org/,Cuba,Persevering exuding budgetary management,1999,Online Publishing,7805 
-81,Ea3f6D52Ec73563,Montes-Hensley,https://krueger.org/,Liechtenstein,Multi-tiered secondary productivity,2009,Printing,8433 -82,bC0CEd48A8000E0,Velazquez-Odom,https://stokes.com/,Djibouti,Streamlined 6thgeneration function,2002,Alternative Dispute Resolution,4044 -83,c89b9b59BC4baa1,Eaton-Morales,https://www.reeves-graham.com/,Micronesia,Customer-focused explicit frame,1990,Capital Markets / Hedge Fund / Private Equity,7013 -84,FEC51bce8421a7b,"Roberson, Pennington and Palmer",http://www.keith-fisher.com/,Cameroon,Adaptive bi-directional hierarchy,1993,Telecommunications,5571 -85,e0E8e27eAc9CAd5,"George, Russo and Guerra",https://drake.com/,Sweden,Centralized non-volatile capability,1989,Military Industry,2880 -86,B97a6CF9bf5983C,Davila Inc,https://mcconnell.info/,Cocos (Keeling) Islands,Profit-focused dedicated frame,2017,Consumer Electronics,2215 -87,a0a6f9b3DbcBEb5,Mays-Preston,http://www.browning-key.com/,Mali,User-centric heuristic focus group,2006,Military Industry,5786 -88,8cC1bDa330a5871,Pineda-Morton,https://www.carr.com/,United States Virgin Islands,Grass-roots methodical info-mediaries,1991,Printing,6168 -89,ED889CB2FE9cbd3,Huang and Sons,https://www.bolton.com/,Eritrea,Re-contextualized dynamic hierarchy,1981,Semiconductors,7484 -90,F4Dc1417BC6cb8f,Gilbert-Simon,https://www.bradford.biz/,Burundi,Grass-roots radical parallelism,1973,Newspapers / Journalism,1927 -91,7ABc3c7ecA03B34,Sampson-Griffith,http://hendricks.org/,Benin,Multi-layered composite paradigm,1972,Textiles,3881 -92,4e0719FBE38e0aB,Miles-Dominguez,http://www.turner.com/,Gibraltar,Organized empowering forecast,1996,Civic / Social Organization,897 -93,dEbDAAeDfaed00A,Rowe and Sons,https://www.simpson.org/,El Salvador,Balanced multimedia knowledgebase,1978,Facilities Services,8172 -94,61BDeCfeFD0cEF5,"Valenzuela, Holmes and Rowland",https://www.dorsey.net/,Taiwan,Persistent tertiary focus group,1999,Transportation,1483 -95,4e91eD25f486110,"Best, Wade and 
Shepard",https://zimmerman.com/,Zimbabwe,Innovative background definition,1991,Gambling / Casinos,4873 -96,0a0bfFbBbB8eC7c,Holmes Group,https://mcdowell.org/,Ethiopia,Right-sized zero tolerance focus group,1975,Photography,2988 -97,BA6Cd9Dae2Efd62,Good Ltd,http://duffy.com/,Anguilla,Reverse-engineered composite moratorium,1971,Consumer Services,4292 -98,E7df80C60Abd7f9,Clements-Espinoza,http://www.flowers.net/,Falkland Islands (Malvinas),Progressive modular hub,1991,Broadcast Media,236 -99,AFc285dbE2fEd24,Mendez Inc,https://www.burke.net/,Kyrgyz Republic,User-friendly exuding migration,1993,Education Management,339 -100,e9eB5A60Cef8354,Watkins-Kaiser,http://www.herring.com/,Togo,Synergistic background access,2009,Financial Services,2785 +Index,Organization Id,Name,Website,Country,Description,Founded,Industry,Number of employees +1,FAB0d41d5b5d22c,Ferrell LLC,https://price.net/,Papua New Guinea,Horizontal empowering knowledgebase,1990,Plastics,3498 +2,6A7EdDEA9FaDC52,"Mckinney, Riley and Day",http://www.hall-buchanan.info/,Finland,User-centric system-worthy leverage,2015,Glass / Ceramics / Concrete,4952 +3,0bFED1ADAE4bcC1,Hester Ltd,http://sullivan-reed.com/,China,Switchable scalable moratorium,1971,Public Safety,5287 +4,2bFC1Be8a4ce42f,Holder-Sellers,https://becker.com/,Turkmenistan,De-engineered systemic artificial intelligence,2004,Automotive,921 +5,9eE8A6a4Eb96C24,Mayer Group,http://www.brewer.com/,Mauritius,Synchronized needs-based challenge,1991,Transportation,7870 +6,cC757116fe1C085,Henry-Thompson,http://morse.net/,Bahamas,Face-to-face well-modulated customer loyalty,1992,Primary / Secondary Education,4914 +7,219233e8aFF1BC3,Hansen-Everett,https://www.kidd.org/,Pakistan,Seamless disintermediate collaboration,2018,Publishing Industry,7832 +8,ccc93DCF81a31CD,Mcintosh-Mora,https://www.brooks.com/,Heard Island and McDonald Islands,Centralized attitude-oriented capability,1970,Import / Export,4389 +9,0B4F93aA06ED03e,Carr Inc,http://ross.com/,Kuwait,Distributed 
impactful customer loyalty,1996,Plastics,8167 +10,738b5aDe6B1C6A5,Gaines Inc,http://sandoval-hooper.com/,Uzbekistan,Multi-lateral scalable protocol,1997,Outsourcing / Offshoring,9698 +11,AE61b8Ffebbc476,Kidd Group,http://www.lyons.com/,Bouvet Island (Bouvetoya),Proactive foreground paradigm,2001,Primary / Secondary Education,7473 +12,eb3B7D06cCdD609,Crane-Clarke,https://www.sandoval.com/,Denmark,Front-line clear-thinking encryption,2014,Food / Beverages,9011 +13,8D0c29189C9798B,"Keller, Campos and Black",https://www.garner.info/,Liberia,Ameliorated directional emulation,2020,Museums / Institutions,2862 +14,D2c91cc03CA394c,Glover-Pope,http://www.silva.biz/,United Arab Emirates,Persevering contextually-based approach,2013,Medical Practice,9079 +15,C8AC1eaf9C036F4,Pacheco-Spears,https://aguilar.com/,Sweden,Secured logistical synergy,1984,Maritime,769 +16,b5D10A14f7a8AfE,Hodge-Ayers,http://www.archer-elliott.com/,Honduras,Future-proofed radical implementation,1990,Facilities Services,8508 +17,68139b5C4De03B4,"Bowers, Guerra and Krause",http://www.carrillo-nicholson.com/,Uganda,De-engineered transitional strategy,1972,Primary / Secondary Education,6986 +18,5c2EffEfdba2BdF,Mckenzie-Melton,http://montoya-thompson.com/,Hong Kong,Reverse-engineered heuristic alliance,1998,Investment Management / Hedge Fund / Private Equity,4589 +19,ba179F19F7925f5,Branch-Mann,http://www.lozano.com/,Botswana,Adaptive intangible frame,1999,Architecture / Planning,7961 +20,c1Ce9B350BAc66b,Weiss and Sons,https://barrett.com/,Korea,Sharable optimal functionalities,2011,Plastics,5984 +21,8de40AC4e6EaCa4,"Velez, Payne and Coffey",http://burton.com/,Luxembourg,Mandatory coherent synergy,1986,Wholesale,5010 +22,Aad86a4F0385F2d,Harrell LLC,http://www.frey-rosario.com/,Guadeloupe,Reverse-engineered mission-critical moratorium,2018,Construction,2185 +23,22aC3FFd64fD703,"Eaton, Reynolds and Vargas",http://www.freeman.biz/,Monaco,Self-enabling multi-tasking process improvement,2014,Luxury Goods / 
Jewelry,8987 +24,5Ec4C272bCf085c,Robbins-Cummings,http://donaldson-wilkins.com/,Belgium,Organic non-volatile hierarchy,1991,Pharmaceuticals,5038 +25,5fDBeA8BB91a000,Jenkins Inc,http://www.kirk.biz/,South Africa,Front-line systematic help-desk,2002,Insurance,1215 +26,dFfD6a6F9AC2d9C,"Greene, Benjamin and Novak",http://www.kent.net/,Romania,Centralized leadingedge moratorium,2012,Museums / Institutions,4941 +27,4B217cC5a0674C5,"Dickson, Richmond and Clay",http://everett.com/,Czech Republic,Team-oriented tangible complexity,1980,Real Estate / Mortgage,3122 +28,88b1f1cDcf59a37,Prince-David,http://thompson.com/,Christmas Island,Virtual holistic methodology,1970,Banking / Mortgage,1046 +29,f9F7bBCAEeC360F,Ayala LLC,http://www.zhang.com/,Philippines,Open-source zero administration hierarchy,2021,Legal Services,7664 +30,7Cb3AeFcE4Ba31e,Rivas Group,https://hebert.org/,Australia,Open-architected well-modulated capacity,1998,Logistics / Procurement,4155 +31,ccBcC32adcbc530,"Sloan, Mays and Whitehead",http://lawson.com/,Chad,Face-to-face high-level conglomeration,1997,Civil Engineering,365 +32,f5afd686b3d05F5,"Durham, Allen and Barnes",http://chan-stafford.org/,Zimbabwe,Synergistic web-enabled framework,1993,Mechanical or Industrial Engineering,6135 +33,38C6cfC5074Fa5e,Fritz-Franklin,http://www.lambert.com/,Nepal,Automated 4thgeneration website,1972,Hospitality,4516 +34,5Cd7efccCcba38f,Burch-Ewing,http://cline.net/,Taiwan,User-centric 4thgeneration system engine,1981,Venture Capital / VC,7443 +35,9E6Acb51e3F9d6F,"Glass, Barrera and Turner",https://dunlap.com/,Kyrgyz Republic,Multi-channeled 3rdgeneration open system,2020,Utilities,2610 +36,4D4d7E18321eaeC,Pineda-Cox,http://aguilar.org/,Bolivia,Fundamental asynchronous capability,2010,Human Resources / HR,1312 +37,485f5d06B938F2b,"Baker, Mccann and Macdonald",http://www.anderson-barker.com/,Kenya,Cross-group user-facing focus group,2013,Legislative Office,1638 
+38,19E3a5Bf6dBDc4F,Cuevas-Moss,https://dodson-castaneda.net/,Guatemala,Extended human-resource intranet,1994,Music,9995 +39,6883A965c7b68F7,Hahn PLC,http://newman.com/,Belarus,Organic logistical leverage,2012,Electrical / Electronic Manufacturing,3715 +40,AC5B7AA74Aa4A2E,"Valentine, Ferguson and Kramer",http://stuart.net/,Jersey,Centralized secondary time-frame,1997,Non - Profit / Volunteering,3585 +41,decab0D5027CA6a,Arroyo Inc,https://www.turner.com/,Grenada,Managed demand-driven website,2006,Writing / Editing,9067 +42,dF084FbBb613eea,Walls LLC,http://www.reese-vasquez.biz/,Cape Verde,Self-enabling fresh-thinking installation,1989,Investment Management / Hedge Fund / Private Equity,1678 +43,A2D89Ab9bCcAd4e,"Mitchell, Warren and Schneider",https://fox.biz/,Trinidad and Tobago,Enhanced intangible time-frame,2021,Capital Markets / Hedge Fund / Private Equity,3816 +44,77aDc905434a49f,Prince PLC,https://www.watts.com/,Sweden,Profit-focused coherent installation,2016,Individual / Family Services,7645 +45,235fdEFE2cfDa5F,Brock-Blackwell,http://www.small.com/,Benin,Secured foreground emulation,1986,Online Publishing,7034 +46,1eD64cFe986BBbE,Walton-Barnett,https://ashley-schaefer.com/,Western Sahara,Right-sized clear-thinking flexibility,2001,Luxury Goods / Jewelry,1746 +47,CbBbFcdd0eaE2cF,Bartlett-Arroyo,https://cruz.com/,Northern Mariana Islands,Realigned didactic function,1976,Civic / Social Organization,3987 +48,49aECbDaE6aBD53,"Wallace, Madden and Morris",http://www.blevins-fernandez.biz/,Germany,Persistent real-time customer loyalty,2016,Pharmaceuticals,9443 +49,7b3fe6e7E72bFa4,Berg-Sparks,https://cisneros-love.com/,Canada,Stand-alone static implementation,1974,Arts / Crafts,2073 +50,c6DedA82A8aef7E,Gonzales Ltd,http://bird.com/,Tonga,Managed human-resource policy,1988,Consumer Goods,9069 +51,7D9FBF85cdC3871,Lawson and Sons,https://www.wong.com/,French Southern Territories,Compatible analyzing intranet,2021,Arts / Crafts,3527 +52,7dd18Fb7cB07b65,"Mcguire, Mcconnell 
and Olsen",https://melton-briggs.com/,Korea,Profound client-server frame,1988,Printing,8445 +53,EF5B55FadccB8Fe,Charles-Phillips,https://bowman.com/,Cote d'Ivoire,Monitored client-server implementation,2012,Mental Health Care,3450 +54,f8D4B99e11fAF5D,Odom Ltd,https://www.humphrey-hess.com/,Cote d'Ivoire,Advanced static process improvement,2012,Management Consulting,1825 +55,e24D21BFd3bF1E5,Richard PLC,https://holden-coleman.net/,Mayotte,Object-based optimizing model,1971,Broadcast Media,4942 +56,B9BdfEB6D3Ca44E,Sampson Ltd,https://blevins.com/,Cayman Islands,Intuitive local adapter,2005,Farming,1418 +57,2a74D6f3D3B268e,"Cherry, Le and Callahan",https://waller-delacruz.biz/,Nigeria,Universal human-resource collaboration,2017,Entertainment / Movie Production,7202 +58,Bf3F3f62c8aBC33,Cherry PLC,https://www.avila.info/,Marshall Islands,Persistent tertiary website,1980,Plastics,8245 +59,aeBe26B80a7a23c,Melton-Nichols,https://kennedy.com/,Palau,User-friendly clear-thinking productivity,2021,Legislative Office,8741 +60,aAeb29ad43886C6,Potter-Walsh,http://thomas-french.org/,Turkey,Optional non-volatile open system,2008,Human Resources / HR,6923 +61,bD1bc6bB6d1FeD3,Freeman-Chen,https://mathis.com/,Timor-Leste,Phased next generation adapter,1973,International Trade / Development,346 +62,EB9f456e8b7022a,Soto Group,https://norris.info/,Vietnam,Enterprise-wide executive installation,1988,Business Supplies / Equipment,9097 +63,Dfef38C51D8DAe3,"Poole, Cruz and Whitney",https://reed.info/,Reunion,Balanced analyzing groupware,1978,Marketing / Advertising / Sales,2992 +64,055ffEfB2Dd95B0,Riley Ltd,http://wiley.com/,Brazil,Optional exuding superstructure,1986,Textiles,9315 +65,cBfe4dbAE1699da,"Erickson, Andrews and Bailey",https://www.hobbs-grant.com/,Eritrea,Vision-oriented secondary project,2014,Consumer Electronics,7829 +66,fdFbecbadcdCdf1,"Wilkinson, Charles and Arroyo",http://hunter-mcfarland.com/,United States Virgin Islands,Assimilated 24/7 archive,1996,Building Materials,602 
+67,5DCb8A5a5ca03c0,Floyd Ltd,http://www.whitney.com/,Falkland Islands (Malvinas),Function-based fault-tolerant concept,2017,Public Relations / PR,2911 +68,ce57DCbcFD6d618,Newman-Galloway,https://www.scott.com/,Luxembourg,Enhanced foreground collaboration,1987,Information Technology / IT,3934 +69,5aaD187dc929371,Frazier-Butler,https://www.daugherty-farley.info/,Northern Mariana Islands,Persistent interactive circuit,1972,Outsourcing / Offshoring,5130 +70,902D7Ac8b6d476b,Newton Inc,https://www.richmond-manning.info/,Netherlands Antilles,Fundamental stable info-mediaries,1976,Military Industry,563 +71,32BB9Ff4d939788,Duffy-Levy,https://www.potter.com/,Guernsey,Diverse exuding installation,1982,Wireless,6146 +72,adcB0afbE58bAe3,Wagner LLC,https://decker-esparza.com/,Uruguay,Reactive attitude-oriented toolset,1987,International Affairs,6874 +73,dfcA1c84AdB61Ac,Mccall-Holmes,http://www.dean.com/,Benin,Object-based value-added database,2009,Legal Services,696 +74,208044AC2fe52F3,Massey LLC,https://frazier.biz/,Suriname,Configurable zero administration Graphical User Interface,1986,Accounting,5004 +75,f3C365f0c1A0623,Hicks LLC,http://alvarez.biz/,Pakistan,Quality-focused client-server Graphical User Interface,1970,Computer Software / Engineering,8480 +76,ec5Bdd3CBAfaB93,"Cole, Russell and Avery",http://www.blankenship.com/,Mongolia,De-engineered fault-tolerant challenge,2000,Law Enforcement,7012 +77,DDB19Be7eeB56B4,Cummings-Rojas,https://simon-pearson.com/,Svalbard & Jan Mayen Islands,User-centric modular customer loyalty,2012,Financial Services,7529 +78,dd6CA3d0bc3cAfc,"Beasley, Greene and Mahoney",http://www.petersen-lawrence.com/,Togo,Extended content-based methodology,1976,Religious Institutions,869 +79,A0B9d56e61070e3,"Beasley, Sims and Allison",http://burke.info/,Latvia,Secured zero tolerance hub,1972,Facilities Services,6182 +80,cBa7EFe5D05Adaf,Crawford-Rivera,https://black-ramirez.org/,Cuba,Persevering exuding budgetary management,1999,Online Publishing,7805 
+81,Ea3f6D52Ec73563,Montes-Hensley,https://krueger.org/,Liechtenstein,Multi-tiered secondary productivity,2009,Printing,8433 +82,bC0CEd48A8000E0,Velazquez-Odom,https://stokes.com/,Djibouti,Streamlined 6thgeneration function,2002,Alternative Dispute Resolution,4044 +83,c89b9b59BC4baa1,Eaton-Morales,https://www.reeves-graham.com/,Micronesia,Customer-focused explicit frame,1990,Capital Markets / Hedge Fund / Private Equity,7013 +84,FEC51bce8421a7b,"Roberson, Pennington and Palmer",http://www.keith-fisher.com/,Cameroon,Adaptive bi-directional hierarchy,1993,Telecommunications,5571 +85,e0E8e27eAc9CAd5,"George, Russo and Guerra",https://drake.com/,Sweden,Centralized non-volatile capability,1989,Military Industry,2880 +86,B97a6CF9bf5983C,Davila Inc,https://mcconnell.info/,Cocos (Keeling) Islands,Profit-focused dedicated frame,2017,Consumer Electronics,2215 +87,a0a6f9b3DbcBEb5,Mays-Preston,http://www.browning-key.com/,Mali,User-centric heuristic focus group,2006,Military Industry,5786 +88,8cC1bDa330a5871,Pineda-Morton,https://www.carr.com/,United States Virgin Islands,Grass-roots methodical info-mediaries,1991,Printing,6168 +89,ED889CB2FE9cbd3,Huang and Sons,https://www.bolton.com/,Eritrea,Re-contextualized dynamic hierarchy,1981,Semiconductors,7484 +90,F4Dc1417BC6cb8f,Gilbert-Simon,https://www.bradford.biz/,Burundi,Grass-roots radical parallelism,1973,Newspapers / Journalism,1927 +91,7ABc3c7ecA03B34,Sampson-Griffith,http://hendricks.org/,Benin,Multi-layered composite paradigm,1972,Textiles,3881 +92,4e0719FBE38e0aB,Miles-Dominguez,http://www.turner.com/,Gibraltar,Organized empowering forecast,1996,Civic / Social Organization,897 +93,dEbDAAeDfaed00A,Rowe and Sons,https://www.simpson.org/,El Salvador,Balanced multimedia knowledgebase,1978,Facilities Services,8172 +94,61BDeCfeFD0cEF5,"Valenzuela, Holmes and Rowland",https://www.dorsey.net/,Taiwan,Persistent tertiary focus group,1999,Transportation,1483 +95,4e91eD25f486110,"Best, Wade and 
Shepard",https://zimmerman.com/,Zimbabwe,Innovative background definition,1991,Gambling / Casinos,4873 +96,0a0bfFbBbB8eC7c,Holmes Group,https://mcdowell.org/,Ethiopia,Right-sized zero tolerance focus group,1975,Photography,2988 +97,BA6Cd9Dae2Efd62,Good Ltd,http://duffy.com/,Anguilla,Reverse-engineered composite moratorium,1971,Consumer Services,4292 +98,E7df80C60Abd7f9,Clements-Espinoza,http://www.flowers.net/,Falkland Islands (Malvinas),Progressive modular hub,1991,Broadcast Media,236 +99,AFc285dbE2fEd24,Mendez Inc,https://www.burke.net/,Kyrgyz Republic,User-friendly exuding migration,1993,Education Management,339 +100,e9eB5A60Cef8354,Watkins-Kaiser,http://www.herring.com/,Togo,Synergistic background access,2009,Financial Services,2785 diff --git a/docs/static/img/admin-page.png b/docs/static/img/admin-page.png new file mode 100644 index 000000000..aa23164de Binary files /dev/null and b/docs/static/img/admin-page.png differ diff --git a/docs/static/img/api-key.png b/docs/static/img/api-key.png new file mode 100644 index 000000000..eb1c8de37 Binary files /dev/null and b/docs/static/img/api-key.png differ diff --git a/docs/static/img/logo.svg b/docs/static/img/logo.svg index 136c4c835..3972c658b 100644 Binary files a/docs/static/img/logo.svg and b/docs/static/img/logo.svg differ diff --git a/docs/static/img/my-account.png b/docs/static/img/my-account.png new file mode 100644 index 000000000..c8b887ca1 Binary files /dev/null and b/docs/static/img/my-account.png differ diff --git a/docs/static/img/new_langflow.gif b/docs/static/img/new_langflow.gif index 18b3389f3..6323d20df 100644 Binary files a/docs/static/img/new_langflow.gif and b/docs/static/img/new_langflow.gif differ diff --git a/docs/static/img/new_langflow2.gif b/docs/static/img/new_langflow2.gif index d963db0b8..587450de2 100644 Binary files a/docs/static/img/new_langflow2.gif and b/docs/static/img/new_langflow2.gif differ diff --git a/docs/static/img/profile-settings.png 
b/docs/static/img/profile-settings.png new file mode 100644 index 000000000..77077d463 Binary files /dev/null and b/docs/static/img/profile-settings.png differ diff --git a/docs/static/img/sign-up.png b/docs/static/img/sign-up.png new file mode 100644 index 000000000..3ffdd1793 Binary files /dev/null and b/docs/static/img/sign-up.png differ diff --git a/docs/static/videos/langflow_api.mp4 b/docs/static/videos/langflow_api.mp4 index f0daa5266..7bb270f6b 100644 Binary files a/docs/static/videos/langflow_api.mp4 and b/docs/static/videos/langflow_api.mp4 differ diff --git a/docs/static/videos/langflow_build.mp4 b/docs/static/videos/langflow_build.mp4 index 9d068fa01..fffd21aec 100644 Binary files a/docs/static/videos/langflow_build.mp4 and b/docs/static/videos/langflow_build.mp4 differ diff --git a/docs/static/videos/langflow_collection.mp4 b/docs/static/videos/langflow_collection.mp4 index 69d172776..f240d1be0 100644 Binary files a/docs/static/videos/langflow_collection.mp4 and b/docs/static/videos/langflow_collection.mp4 differ diff --git a/docs/static/videos/langflow_collection_example.mp4 b/docs/static/videos/langflow_collection_example.mp4 index e58ea31e4..31cc94961 100644 Binary files a/docs/static/videos/langflow_collection_example.mp4 and b/docs/static/videos/langflow_collection_example.mp4 differ diff --git a/docs/static/videos/langflow_fork.mp4 b/docs/static/videos/langflow_fork.mp4 index 03c280c35..c9b75bc23 100644 Binary files a/docs/static/videos/langflow_fork.mp4 and b/docs/static/videos/langflow_fork.mp4 differ diff --git a/docs/static/videos/langflow_parameters.mp4 b/docs/static/videos/langflow_parameters.mp4 index 370ca5f36..c7599e649 100644 Binary files a/docs/static/videos/langflow_parameters.mp4 and b/docs/static/videos/langflow_parameters.mp4 differ diff --git a/docs/static/videos/langflow_widget.mp4 b/docs/static/videos/langflow_widget.mp4 index 7894316f7..d5514a948 100644 Binary files a/docs/static/videos/langflow_widget.mp4 and 
b/docs/static/videos/langflow_widget.mp4 differ diff --git a/img/langflow-demo.gif b/img/langflow-demo.gif index 19b63eeb1..4cea58628 100644 Binary files a/img/langflow-demo.gif and b/img/langflow-demo.gif differ diff --git a/package.json b/package.json deleted file mode 100644 index b80f86e35..000000000 --- a/package.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "devDependencies": { - "@svgr/cli": "^8.0.1" - } -} diff --git a/poetry.lock b/poetry.lock index 6624a2237..0ffa6f5e4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -135,24 +135,27 @@ frozenlist = ">=1.1.0" [[package]] name = "aiostream" -version = "0.4.5" +version = "0.5.0" description = "Generator-based operators for asynchronous iteration" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "aiostream-0.4.5-py3-none-any.whl", hash = "sha256:25b7c2d9c83570d78c0ef5a20e949b7d0b8ea3b0b0a4f22c49d3f721105a6057"}, - {file = "aiostream-0.4.5.tar.gz", hash = "sha256:3ecbf87085230fbcd9605c32ca20c4fb41af02c71d076eab246ea22e35947d88"}, + {file = "aiostream-0.5.0-py3-none-any.whl", hash = "sha256:daf1979a683452fbcd9b1a6933b3d7c9237d3c240b9737f176ab6e1aaf2d8bca"}, + {file = "aiostream-0.5.0.tar.gz", hash = "sha256:d64aa63cb9b96b4dae74b09f260d2d0fa423fbbd17bce29ed39ff8e5caf95b57"}, ] +[package.dependencies] +typing-extensions = "*" + [[package]] name = "alembic" -version = "1.11.3" +version = "1.12.0" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.7" files = [ - {file = "alembic-1.11.3-py3-none-any.whl", hash = "sha256:d6c96c2482740592777c400550a523bc7a9aada4e210cae2e733354ddae6f6f8"}, - {file = "alembic-1.11.3.tar.gz", hash = "sha256:3db4ce81a9072e1b5aa44c2d202add24553182672a12daf21608d6f62a8f9cf9"}, + {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, + {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, ] [package.dependencies] @@ -176,43 +179,43 @@ files = [ [[package]] name = "anthropic" -version = "0.3.10" +version = "0.3.3" description = "Client library for the anthropic API" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "anthropic-0.3.10-py3-none-any.whl", hash = "sha256:95cd73168296a4f91a5899a28660991e044322cf94442d07b99901d4ca74acd6"}, - {file = "anthropic-0.3.10.tar.gz", hash = "sha256:d1f66efc541fbff0ecfd37fd4d3690f9daaa748fc42d9ded5863a10815a5d97b"}, + {file = "anthropic-0.3.3-py3-none-any.whl", hash = "sha256:4f607095057e8ec06e8724a0f2dbcbbdd6a5750f6116d87eb51cae82801700a7"}, + {file = "anthropic-0.3.3.tar.gz", hash = "sha256:db78e49c17bb0ac905e2b2f65789ecd3d33256aa0448e45a5a484909fc6a670f"}, ] [package.dependencies] -anyio = ">=3.5.0,<4" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -pydantic = ">=1.9.0,<3" +anyio = ">=3.5.0" +distro = ">=1.7.0" +httpx = ">=0.23.0" +pydantic = ">=1.9.0,<2.0.0" tokenizers = ">=0.13.0" -typing-extensions = ">=4.5,<5" +typing-extensions = ">=4.1.1" [[package]] name = "anyio" -version = "3.7.1" +version = "4.0.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, - {file = "anyio-3.7.1.tar.gz", hash = 
"sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, + {file = "anyio-4.0.0-py3-none-any.whl", hash = "sha256:cfdb2b588b9fc25ede96d8db56ed50848b0b649dca3dd1df0b11f683bb9e0b5f"}, + {file = "anyio-4.0.0.tar.gz", hash = "sha256:f7ed51751b2c2add651e5747c891b47e26d2a21be5d32d9311dfe9692f3e5d7a"}, ] [package.dependencies] -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.2.2)", "sphinxcontrib-jquery"] -test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (<0.22)"] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.22)"] [[package]] name = "appdirs" @@ -236,17 +239,6 @@ files = [ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] -[[package]] -name = "argilla" -version = "0.0.1" -description = "" -optional = false -python-versions = "*" -files = [ - {file = "argilla-0.0.1-py3-none-any.whl", hash = "sha256:8bdc3c505bcfb47ba4b91f5658034eae53bf7d4f9317980397605c0c55817396"}, - {file = "argilla-0.0.1.tar.gz", hash = "sha256:5017854754e89f573b31af25b25b803f51cea9ca1fa0bcf00505dee1f45cf7c9"}, -] - [[package]] name = "asgiref" version = "3.7.2" @@ -266,17 +258,17 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "asttokens" -version = "2.2.1" +version = "2.4.0" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" files = [ - {file = 
"asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, - {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, + {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, + {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, ] [package.dependencies] -six = "*" +six = ">=1.12.0" [package.extras] test = ["astroid", "pytest"] @@ -398,35 +390,46 @@ soupsieve = ">1.2" html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "billiard" +version = "4.1.0" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = true +python-versions = ">=3.7" +files = [ + {file = "billiard-4.1.0-py3-none-any.whl", hash = "sha256:0f50d6be051c6b2b75bfbc8bfd85af195c5739c281d3f5b86a5640c65563614a"}, + {file = "billiard-4.1.0.tar.gz", hash = "sha256:1ad2eeae8e28053d729ba3373d34d9d6e210f6e4d8bf0a9c64f92bd053f1edf5"}, +] + [[package]] name = "black" -version = "23.7.0" +version = "23.9.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = 
"black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, + {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, + {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, + {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, + {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, + {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, + {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, + {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, + {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, + {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, + {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, + {file = 
"black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, + {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, + {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, + {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, ] [package.dependencies] @@ -436,7 +439,7 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] @@ -455,6 +458,62 @@ files = [ {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, ] +[[package]] +name = "celery" +version = "5.3.4" +description = "Distributed Task Queue." 
+optional = true +python-versions = ">=3.8" +files = [ + {file = "celery-5.3.4-py3-none-any.whl", hash = "sha256:1e6ed40af72695464ce98ca2c201ad0ef8fd192246f6c9eac8bba343b980ad34"}, + {file = "celery-5.3.4.tar.gz", hash = "sha256:9023df6a8962da79eb30c0c84d5f4863d9793a466354cc931d7f72423996de28"}, +] + +[package.dependencies] +billiard = ">=4.1.0,<5.0" +click = ">=8.1.2,<9.0" +click-didyoumean = ">=0.3.0" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.3.2,<6.0" +python-dateutil = ">=2.8.2" +redis = {version = ">=4.5.2,<4.5.5 || >4.5.5,<5.0.0", optional = true, markers = "extra == \"redis\""} +tzdata = ">=2022.7" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=2.0.2)"] +auth = ["cryptography (==41.0.3)"] +azureblockblob = ["azure-storage-blob (>=12.15.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (>=3.25.0,<4)"] +consul = ["python-consul2 (==0.1.5)"] +cosmosdbsql = ["pydocumentdb (==2.3.5)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb (==1.14.2)"] +django = ["Django (>=2.2.28)"] +dynamodb = ["boto3 (>=1.26.143)"] +elasticsearch = ["elasticsearch (<8.0)"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +memcache = ["pylibmc (==1.6.3)"] +mongodb = ["pymongo[srv] (>=4.0.2)"] +msgpack = ["msgpack (==1.0.5)"] +pymemcache = ["python-memcached (==1.59)"] +pyro = ["pyro4 (==4.82)"] +pytest = ["pytest-celery (==0.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,<5.0.0)"] +s3 = ["boto3 (>=1.26.143)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem (==4.1.4)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard (==0.21.0)"] + [[package]] name = "certifi" version = "2023.7.22" @@ -682,81 +741,80 @@ 
colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "clickhouse-connect" -version = "0.6.8" +version = "0.6.14" description = "ClickHouse Database Core Driver for Python, Pandas, and Superset" optional = false python-versions = "~=3.7" files = [ - {file = "clickhouse-connect-0.6.8.tar.gz", hash = "sha256:a8729f4f100f891d058944342d9a0838f4a51bd91504af6b6af106a01aa08426"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8155c2a976d2de82a3df526dc980b77a07e815353f67e9bf49c15e09faa8b620"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:068244b05ea155a6cf6a6b4379b39c078f70659480366a152161f3235444dd4e"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4f3d53a3c49002c57590abd17e616374559f3fa4c96d1aaf9500aef7655879a"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e28e4e0e70433c3339a945d73c219f58fcf7b58481ff6f3077da0ae20a295d0"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:783cf21680c37b1fc2e48859ae4ba7cab8d648d36d466d936645e2a27ffb2050"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97844fd19faacf135c56c43d3f1b8493840c9e283931ba0773931a9505b62da4"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:286c97ff8dd0a32cde48fe1b7024b483bf900038620441a019647f25c1928c0c"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:49ee463569d6f9f92e2ed83ab843e139d07ae87fb477956cd7659c1458ac2360"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-win32.whl", hash = "sha256:a0ed55deccdae2d9b2ce545b0c1b554389bb89f03dc4afd7ee0cc0ddb26acb22"}, - {file = "clickhouse_connect-0.6.8-cp310-cp310-win_amd64.whl", hash = 
"sha256:4bf40ea3d41d75421e2f489894e14b7a5cd2bf4000bbb927364e4b43066d2595"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5646f7eee3609da6277d2dc8ccb4be16959b5e4959bb408eb52eed628b4e29f2"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2c9be52670222231e3b7284267121568ab15fb6d48f163695fa26c5ebaf193"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:675f02e3fb586172da9d7231d0c10a10c47b8063b0f43fce04719d03584ac55e"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a48301444f5f0931a51ec9c0b8b7458a4ba6ddb9128c352102bbe869c96a3ba0"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:03233693f9d2a38bd781786a8b6a435df9e2c2d85d1a6f84bcc21ecd32c87b84"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c6b136213d042262a2257ba2db3a9a091c465a2535311172be0e87f0c66653b5"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c96c69570d17b83b45c9e14fd1a92be4d180b3accb52539d1333ff4bff4d1f7b"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:86fc223b2e84c7eb7d4e6082f81a1304164c32112db9a8e41720b1077d53e206"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-win32.whl", hash = "sha256:529a35e03069b4d31b1624506021e804c26907ae1faa2753678590414681c37f"}, - {file = "clickhouse_connect-0.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:4f7d84456939eec83d0670050ed00c84ef11ff6f1d15e1151d9cfbed4094218c"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4eeb30c4a5a2e404eece76891baa4554ee85cc34e17d9714f8da79df4c162abe"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:610f4946495acf1fc465e882e006c8076344cbdd0b0f34a10b113c9a2adc49fb"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc87597bc68e0d9b8cb322e47e427423565e9a774de0fe080611001d7cd1f657"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0672dcf6c75aeea7a20104538dcaecc4a10915b83e1eb2f0bafa2922368b7e8c"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:190b2b7de1b15506dd5c7c34df403bd5cde94dc4c98f082ab6f1453475af5efc"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a1ba4dbd0ce87a6e430b834e6790f00c21a9a5a49587b834ee94e93c42a83891"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fcca53c9b186c5d18ccbeb3e90bb58e6c7777c7eb22ea5166da2f83661b595a7"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-win32.whl", hash = "sha256:07350c635ddbd21cc4680401680436a360f6182e3fa2b8b9da6cecd2104acd28"}, - {file = "clickhouse_connect-0.6.8-cp37-cp37m-win_amd64.whl", hash = "sha256:4ec1e5f4cb1e276f6f885695466ec53664cb6d6749c4f214778f746cd4c169c4"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e195d7cbb0305df40fce6d3b16d556c2eccc166807d19f49b76730ad6cab9ac6"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1fe67217d369e30c446d7998186d6bedfab0ef6ad41c63bc4a65cd5c16b11e7c"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05bd58f417a98061e4d4dff2238a1d0b80f0c1c54e489cfb37b6ac0f55a5117d"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:715acb48161687cf860b6244d064646d90069d9ed6705d4e71c07ee54a98ede1"}, - {file = 
"clickhouse_connect-0.6.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c05c2fefb326c49d798f1adce99a30fc25804311c0adbb6537a08bd7b9fb4a6"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ba6335faec2a0525d14bc5dfc8268924878d5219831acd51ca88ef69b9d4186f"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d3122339282e41dc863c1d2ad6f812424aba9023a5733cfe5e801b02b36e2596"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:397f803dfe502329be9ac209ac55a7229ee6bd81d0e9291faf77ae4c86f1d99c"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-win32.whl", hash = "sha256:0136a99842694219c3b1dbb3f6baf0866112b7ffa6a5c9109fef1b891a788467"}, - {file = "clickhouse_connect-0.6.8-cp38-cp38-win_amd64.whl", hash = "sha256:4340e4d62119ea56c2cbd15673813fe99be8c9de1af96e593ca3b42113a9ae4f"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:64c9f52ce8eec80ab6958702ea910904723e228dc26a63be89b621aa7af80aec"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5169c22b0765ef52d7498108df0f9158da05c35342fc60ba3c6527d9fe6d15f3"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f324a827c6c3c56a651dd5f2c33cdd74c6cea16e4237cb2638a89aefc9bccb"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91e9f9ddb8adeaa6996cd3aa97cbb72b5cc69769d59419cf3a65f5d654af1e92"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09f83ceef6ced6a9a44a3e74c0904cfc5c5f25e563cabc46b922051f4b1e0bf8"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6fae23b60e1653efa03abd850b3b36fdf5c930b5b37a7f70ced2a1f164a11f51"}, - {file = 
"clickhouse_connect-0.6.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4203aa258cd396716d3acdebfa405f849dae83e162dad22b3ce326e54e4cfa92"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bf094318639675d02068dbb93299507694c0f4d66dcb530084044a877e547c83"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-win32.whl", hash = "sha256:28d4022ac7db92a2c405a55e6bfffdfd9afdda2075c5a906ac1ea42d24e95e8d"}, - {file = "clickhouse_connect-0.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:5754e2b12645ab18d4afde101391beb019685b0d0c1b5717733ba8d91fc942cf"}, - {file = "clickhouse_connect-0.6.8-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:160fb485ce6c280c7e30fbc8a2a33e6ecb663e24db43242ad555901e363ef613"}, - {file = "clickhouse_connect-0.6.8-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f660664341ab6f93f61d7245e3983f41fc7bf06520ce44088431d5e48713a8e1"}, - {file = "clickhouse_connect-0.6.8-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99f66263e8014c6c1ba657cfe408c39c9f178c257d159158c7f4a12f4379312a"}, - {file = "clickhouse_connect-0.6.8-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:050681b8ad33fc3210461a6b5e396cca8b202944ef24c436094202702ae84be7"}, - {file = "clickhouse_connect-0.6.8-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:342be4094746bb068496555a3d9ecf4f67574b7bcf9c57a39f76700a7155cc13"}, - {file = "clickhouse_connect-0.6.8-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d956358c9966006b246f450146c816d198972c19531e69e02d3fc8f3fac8b5a2"}, - {file = "clickhouse_connect-0.6.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8def8771307ee5b964dcf9436c3a4dfca46618df72bbf9e73c7882c05812e481"}, - {file = "clickhouse_connect-0.6.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4e05f0504edddfae4a08d4c8ceb2e263982b3d2b453fc54b8c9fa9db1c3ad47a"}, - {file = "clickhouse_connect-0.6.8-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f45986e195cd9352ffab5116df488f21e790c3b33e3ce79910a70730aa90f5bc"}, - {file = "clickhouse_connect-0.6.8-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3c7fde400669245efa572cf0810c4c0a4ca6bb05d6c787a84472a4f958207770"}, - {file = "clickhouse_connect-0.6.8-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2830d1f1b58904c4f1100315345d0e54fd5ed5346b391b25d82b1a0b695c266a"}, - {file = "clickhouse_connect-0.6.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8f055198176252a8298497f05420aa5b7b3d099d77a72b0b25398630303004a"}, - {file = "clickhouse_connect-0.6.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f90b135933cb4297bc67b3d74e3bca57bad80a430883ef2aabf50f985820be3"}, - {file = "clickhouse_connect-0.6.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e87ffc88ebda5cab7fb4de8c832d155f9e50e4f18c43ed44e2c058449b6370ab"}, - {file = "clickhouse_connect-0.6.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:691a12359f88431bbe6c743dab62ef330b3a6dde0aa88b7fa3b332265ee25a8e"}, + {file = "clickhouse-connect-0.6.14.tar.gz", hash = "sha256:0531bbd5b8bdee616bf1cca5ddcb0af86db12e2b48fd39257a8ecdf32200bd57"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04affbd255fb8b1e4a882ddc1336c86530976d05578f47bb65e3a53471d291e4"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5bd61f2665f1890fa632b1181df2900ea838cf152cd9a3f775841ea2deab680"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79897a0987008993f32737e17045a5c1982f9193f7511a3832a7ba3429cbf6b4"}, + {file = 
"clickhouse_connect-0.6.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa95c8a96bdff593924407b074d616ee8a1bfb989579c17b330c6f3b27febfe3"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501c0d843be30c86719b61089fb1de6298ac44b3670594f0a1cb0dc3ad97651e"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1ec9672c9ed9d5e62f66ac14d6470b9b6be9946d6d24ddac87376437863b8f59"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:92173354a6c7c5862fab09dab338197b86a192e0c117137e899e8cf92cc3b5b7"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:757b4c05ebf10bdcb916334c3021ee571a61238907cdeee8c54bcf0550cd0d19"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-win32.whl", hash = "sha256:2e74badf6c7569e1a0ad32f3be250a3ebf28a9df3b15c9709104e5f050486016"}, + {file = "clickhouse_connect-0.6.14-cp310-cp310-win_amd64.whl", hash = "sha256:7b56c422467df5a0b2790e0943b747639f1f172fac7f8d9585adb3302c961fb1"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d2aa6d28d79eb5ca94d7c756ec4dc599d2354897f5ef40fd0d8bdc579a81dd94"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:70cd5b2e0d80dc030355d09db213c73caa78ef259f2b04ce30c1c8cb513bf45b"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:826c85e37555443af945a0d977598814ba7cb09447b0cdd167eae57dfd3f0724"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cdb1f843d134a1e30828900bc51c9c1b4f4e638aac693767685e512fb095af5"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:10a8ea6ca6e0d6b1af50078413e280f271559c462a8644541002e44c2cb5c371"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8b72a5e5d54069dff419a6ec9bbc7f3896fe558551cae6a2b2cba60eaa0607a3"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c531ed454ca9b6d85e739de3770a82eec2087ed2cb9660fb8ff0e62f7f1446cc"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ae6ebf7c507f9d0fece9d1e387c9eec77762693f91647bca18f588cf1d594d24"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-win32.whl", hash = "sha256:cf1e3067c2da8525b6f59a37f8e13cd6c4524f439be8fd7d8fa03f007f96c863"}, + {file = "clickhouse_connect-0.6.14-cp311-cp311-win_amd64.whl", hash = "sha256:15a040210877cc34155943c7870bf78247d4d4fa3bd4e0595ca22e97760679b7"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eb91e4ee0435088fc8bd36de51a93ff9286a514d82ac373b57b2d6cad4655d77"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48108bb1cfe99b6ff60344838859aec1315213dfa618f6ca4b92c0c6e5ae8d41"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c75d4bd8ef0b90f9e89ea70c16ff099278e4bb8f1e045008376ac34c6122b73d"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:897f40eda84e9c45d0bdaf3a9e638e614e236a4a5eeab5cddd920857f9f8f22a"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ecc88656df05ae49e70062aee7022982eec3f87fb14db97c25276fef6633d7c"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:76cec48265774ae3fa61a77b290dcc8385aad4312a8d7dfcaffb9fc00f79458e"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:dba280e00ec4cfe0e4d69f88baa9a0491bc1ed83ec57336e5197adae8d42d0c9"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-win32.whl", hash = "sha256:6c77f537e04747702e009c05f4a7f6f96cbe1696bb89d29f72e39e7370924836"}, + {file = "clickhouse_connect-0.6.14-cp37-cp37m-win_amd64.whl", hash = "sha256:d0eceaff68a53f71384bb9aee7fc1630f68ac10538727c8516ae0af1103f2580"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9dfa09948caeed539cdd019a1e341a379a1dcacdd755b278d12484b4a703afa3"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a30d99cb1fd57b8fed4449632e51d48386d0eec1673f905572c5fc7059215c20"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e88de4fe66ae2b1c15726760cc87a703e4d1162de52a19c8d8b57a4429f08e"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03d721de610beae823068665d6c604a5f390a99e7b2354264b17136a3a520b13"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a627762f2f692e226b3cb574a83133844213c6507c6313d3fefd8a3de08e5798"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:62a596f8d9db8592477a2032c329be7449ea32d133cdc4e5d6f804e251b8617a"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e8ab9e5a61968c328a0fdc254b02b96142ebb4ec2bc1623f9498538f0ebfdc7c"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c21fe379b1b8822eb9644600e38220b5c4b530fd0f2b1da824a0918120a8f01"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-win32.whl", hash = "sha256:2a17b336781d3fbb67ed556918c17e63c7d462709aa6a953bb3410ddb67fd7f4"}, + {file = "clickhouse_connect-0.6.14-cp38-cp38-win_amd64.whl", hash = 
"sha256:838a008c0f7d911ab81f741ea27a64ef7bdcc2508698b70f018987dfc742ffa9"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:083649a97c3f366f66f0f2578b9f88d86c1d3a40b9015c9403db524fda36a952"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9bd6849852b2c55e51a477e10bc8b61990c5f37f31cce5ea6fc970b447b5af"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9152c45423f488cf6229bce1f9e695cd81e7ffcd3ae0f1e40e5e62079b18d4a5"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341e068d4a6a423ed22fb3b96cfe16be0d6305943c3fb1cc48251b7d9729931d"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead7acb841524bd7a73b1f10592a36e901d63bc89af3270ab76b89a11d44fe20"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8bce432f72dcf6679c2d0bac4e3a82a126389ad7951d316f213109cee6925c7c"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1f403499f169574cafb05888dfdaf18065cc49ff1321e5e108c504c8c220e172"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3189fcd339bfd7ae4e703ff40b110b9740d6b1ec8385ed8bd1547663fd046578"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-win32.whl", hash = "sha256:a30de3f0997a9157e840c2d4e07fd9c6fc6e359f1ff9f3a46386b5abdca73c1a"}, + {file = "clickhouse_connect-0.6.14-cp39-cp39-win_amd64.whl", hash = "sha256:c3476a95780374e94dfba2a28093d15f8370bfa6f4cb46a02e0af8813e5f7368"}, + {file = "clickhouse_connect-0.6.14-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:22affe46983e67e3923e9330336d21e9ec4b4812b6fbeb1865514145b3870170"}, + {file = "clickhouse_connect-0.6.14-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:62727090af8875631115911f58442967386b31cd4efa93c951c2aa7e57d1ce4b"}, + {file = "clickhouse_connect-0.6.14-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee4ea5ac58de0580f2e12b46cfd2f8d13c1e690378bf9775bfed0c935232de71"}, + {file = "clickhouse_connect-0.6.14-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a126fe486dd02fa5f8adb0b9d8fd0fc701fb73b2275e1040ed210afadd189f90"}, + {file = "clickhouse_connect-0.6.14-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:de6bf773c8776033ca5fb5a6a376729ae69afdd0b19a71d1460d1a221fc5a627"}, + {file = "clickhouse_connect-0.6.14-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d27d2c9698d1acb550ac8c30c4d9440c7d826a16444e4aea4dacf11ed7ec8988"}, + {file = "clickhouse_connect-0.6.14-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f57efbe536dfbfb7e10dd16ced6fe02441fb174450760f0b29b2b60d23c6462f"}, + {file = "clickhouse_connect-0.6.14-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c134483da38a3d3e38c44da9f3d519d73e177998052d36129e21863c7a3497ee"}, + {file = "clickhouse_connect-0.6.14-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2d6ae7ccb4ca3d310c2971ead9839935890e40da8602dcc92ecda9bbbb24366"}, + {file = "clickhouse_connect-0.6.14-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:0acf6b69b11b757d60545b0ccac3df4980f69351994e30074df84729bb5af5d1"}, + {file = "clickhouse_connect-0.6.14-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e043b3b606749d23eca7601a1a44f188c6f117ae57a2852c66c21f11b7296fe4"}, + {file = "clickhouse_connect-0.6.14-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a887dfef3f3914454c7d7a428db8063b1678c66678cbabcd6368f0b67876f1"}, + {file = "clickhouse_connect-0.6.14-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e255e7c9c38fb9bceefc659374d04914ef2222a6f121fccf86a865b81110e96b"}, + {file = "clickhouse_connect-0.6.14-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2be9a6ba1d3055bb6956be218ffecfa3bfbe47121dfa34467815aa883f15d159"}, + {file = "clickhouse_connect-0.6.14-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:59faa034fdd58c1e7c8b2f4a033e9c611a0c58e193339cdd62d9d91a62f11195"}, ] [package.dependencies] certifi = "*" -importlib-metadata = "*" lz4 = "*" pytz = "*" urllib3 = ">=1.26" @@ -835,65 +893,80 @@ lint = ["black (>=22.6.0)", "mdformat (>0.7)", "mdformat-gfm (>=0.3.5)", "ruff ( test = ["pytest"] typing = ["mypy (>=0.990)"] +[[package]] +name = "configargparse" +version = "1.7" +description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." +optional = false +python-versions = ">=3.5" +files = [ + {file = "ConfigArgParse-1.7-py3-none-any.whl", hash = "sha256:d249da6591465c6c26df64a9f73d2536e743be2f244eb3ebe61114af2f94f86b"}, + {file = "ConfigArgParse-1.7.tar.gz", hash = "sha256:e7067471884de5478c58a511e529f0f9bd1c66bfef1dea90935438d6c23306d1"}, +] + +[package.extras] +test = ["PyYAML", "mock", "pytest"] +yaml = ["PyYAML"] + [[package]] name = "coverage" -version = "7.3.0" +version = "7.3.1" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, - {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, - {file = 
"coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, - {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, - {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, - {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, - {file = "coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, - {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, - {file = "coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, - {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, - {file = 
"coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, - {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, - {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, - {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, - {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, - {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, - {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, - {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, - {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, - {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, - {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, - {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, - {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, - {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, - {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, - {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, - {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, - {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, - {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, - {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, - {file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, - {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, - {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, - {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, + {file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, + {file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, + {file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, + {file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, + {file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, + {file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, + {file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, + {file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, + {file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, + {file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, + {file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, + {file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, + {file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, + {file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, + {file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, + {file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, + {file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, + {file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, + {file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = 
"sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, + {file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, + {file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, + {file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, + {file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, + {file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, + {file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, + {file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, + {file = "coverage-7.3.1.tar.gz", hash = 
"sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, ] [package.dependencies] @@ -904,34 +977,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.3" +version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash 
= "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = 
"cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = "sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, ] [package.dependencies] @@ -949,13 +1022,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "ctransformers" -version = "0.2.22" +version = "0.2.27" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." 
optional = true python-versions = "*" files = [ - {file = "ctransformers-0.2.22-py3-none-any.whl", hash = "sha256:cce72b4ffff3f29d49ea6488110686453b3354c285e96b9c7dd16f273c4b4fc4"}, - {file = "ctransformers-0.2.22.tar.gz", hash = "sha256:ffd15dfe8a6ad45568ac0423bbbb13d7d71b680a7d0b59871a193df8a4b8fdca"}, + {file = "ctransformers-0.2.27-py3-none-any.whl", hash = "sha256:6a3ba47556471850d95fdbc59299a82ab91c9dc8b40201c5e7e82d71360772d9"}, + {file = "ctransformers-0.2.27.tar.gz", hash = "sha256:25653d4be8a5ed4e2d3756544c1e9881bf95404be5371c3ed506a256c28663d5"}, ] [package.dependencies] @@ -984,29 +1057,29 @@ typing-inspect = ">=0.4.0,<1" [[package]] name = "debugpy" -version = "1.6.7.post1" +version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "debugpy-1.6.7.post1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:903bd61d5eb433b6c25b48eae5e23821d4c1a19e25c9610205f5aeaccae64e32"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16882030860081e7dd5aa619f30dec3c2f9a421e69861125f83cc372c94e57d"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-win32.whl", hash = "sha256:eea8d8cfb9965ac41b99a61f8e755a8f50e9a20330938ad8271530210f54e09c"}, - {file = "debugpy-1.6.7.post1-cp310-cp310-win_amd64.whl", hash = "sha256:85969d864c45f70c3996067cfa76a319bae749b04171f2cdeceebe4add316155"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:890f7ab9a683886a0f185786ffbda3b46495c4b929dab083b8c79d6825832a52"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ac7a4dba28801d184b7fc0e024da2635ca87d8b0a825c6087bb5168e3c0d28"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-win32.whl", hash = "sha256:3370ef1b9951d15799ef7af41f8174194f3482ee689988379763ef61a5456426"}, - {file = "debugpy-1.6.7.post1-cp37-cp37m-win_amd64.whl", 
hash = "sha256:65b28435a17cba4c09e739621173ff90c515f7b9e8ea469b92e3c28ef8e5cdfb"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:92b6dae8bfbd497c90596bbb69089acf7954164aea3228a99d7e43e5267f5b36"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72f5d2ecead8125cf669e62784ef1e6300f4067b0f14d9f95ee00ae06fc7c4f7"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-win32.whl", hash = "sha256:f0851403030f3975d6e2eaa4abf73232ab90b98f041e3c09ba33be2beda43fcf"}, - {file = "debugpy-1.6.7.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3de5d0f97c425dc49bce4293df6a04494309eedadd2b52c22e58d95107e178d9"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38651c3639a4e8bbf0ca7e52d799f6abd07d622a193c406be375da4d510d968d"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:038c51268367c9c935905a90b1c2d2dbfe304037c27ba9d19fe7409f8cdc710c"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-win32.whl", hash = "sha256:4b9eba71c290852f959d2cf8a03af28afd3ca639ad374d393d53d367f7f685b2"}, - {file = "debugpy-1.6.7.post1-cp39-cp39-win_amd64.whl", hash = "sha256:973a97ed3b434eab0f792719a484566c35328196540676685c975651266fccf9"}, - {file = "debugpy-1.6.7.post1-py2.py3-none-any.whl", hash = "sha256:1093a5c541af079c13ac8c70ab8b24d1d35c8cacb676306cf11e57f699c02926"}, - {file = "debugpy-1.6.7.post1.zip", hash = "sha256:fe87ec0182ef624855d05e6ed7e0b7cb1359d2ffa2a925f8ec2d22e98b75d0ca"}, + {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, + {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, + {file = "debugpy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:a8b7a2fd27cd9f3553ac112f356ad4ca93338feadd8910277aff71ab24d8775f"}, + {file = 
"debugpy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5d9de202f5d42e62f932507ee8b21e30d49aae7e46d5b1dd5c908db1d7068637"}, + {file = "debugpy-1.8.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ef54404365fae8d45cf450d0544ee40cefbcb9cb85ea7afe89a963c27028261e"}, + {file = "debugpy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60009b132c91951354f54363f8ebdf7457aeb150e84abba5ae251b8e9f29a8a6"}, + {file = "debugpy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:8cd0197141eb9e8a4566794550cfdcdb8b3db0818bdf8c49a8e8f8053e56e38b"}, + {file = "debugpy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:a64093656c4c64dc6a438e11d59369875d200bd5abb8f9b26c1f5f723622e153"}, + {file = "debugpy-1.8.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:b05a6b503ed520ad58c8dc682749113d2fd9f41ffd45daec16e558ca884008cd"}, + {file = "debugpy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c6fb41c98ec51dd010d7ed650accfd07a87fe5e93eca9d5f584d0578f28f35f"}, + {file = "debugpy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:46ab6780159eeabb43c1495d9c84cf85d62975e48b6ec21ee10c95767c0590aa"}, + {file = "debugpy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:bdc5ef99d14b9c0fcb35351b4fbfc06ac0ee576aeab6b2511702e5a648a2e595"}, + {file = "debugpy-1.8.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:61eab4a4c8b6125d41a34bad4e5fe3d2cc145caecd63c3fe953be4cc53e65bf8"}, + {file = "debugpy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:125b9a637e013f9faac0a3d6a82bd17c8b5d2c875fb6b7e2772c5aba6d082332"}, + {file = "debugpy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:57161629133113c97b387382045649a2b985a348f0c9366e22217c87b68b73c6"}, + {file = "debugpy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3412f9faa9ade82aa64a50b602544efcba848c91384e9f93497a458767e6926"}, + {file = "debugpy-1.8.0-py2.py3-none-any.whl", hash = "sha256:9c9b0ac1ce2a42888199df1a1906e45e6f3c9555497643a85e0bf2406e3ffbc4"}, + {file 
= "debugpy-1.8.0.zip", hash = "sha256:12af2c55b419521e33d5fb21bd022df0b5eb267c3e178f1d374a63a2a6bdccd0"}, ] [[package]] @@ -1067,13 +1140,13 @@ graph = ["objgraph (>=1.7.2)"] [[package]] name = "diskcache" -version = "5.6.1" +version = "5.6.3" description = "Disk Cache -- Disk and file backed persistent cache." optional = true python-versions = ">=3" files = [ - {file = "diskcache-5.6.1-py3-none-any.whl", hash = "sha256:558c6a2d5d7c721bb00e40711803d6804850c9f76c426ed81ecc627fe9d2ce2d"}, - {file = "diskcache-5.6.1.tar.gz", hash = "sha256:e4c978532feff5814c4cc00fe1e11e40501985946643d73220d41ee7737c72c3"}, + {file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"}, + {file = "diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc"}, ] [[package]] @@ -1168,63 +1241,47 @@ files = [ [[package]] name = "duckdb" -version = "0.8.1" +version = "0.9.0" description = "DuckDB embedded database" optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:14781d21580ee72aba1f5dcae7734674c9b6c078dd60470a08b2b420d15b996d"}, - {file = "duckdb-0.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f13bf7ab0e56ddd2014ef762ae4ee5ea4df5a69545ce1191b8d7df8118ba3167"}, - {file = "duckdb-0.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4032042d8363e55365bbca3faafc6dc336ed2aad088f10ae1a534ebc5bcc181"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a71bd8f0b0ca77c27fa89b99349ef22599ffefe1e7684ae2e1aa2904a08684"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24568d6e48f3dbbf4a933109e323507a46b9399ed24c5d4388c4987ddc694fd0"}, - {file = "duckdb-0.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:297226c0dadaa07f7c5ae7cbdb9adba9567db7b16693dbd1b406b739ce0d7924"}, - {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5792cf777ece2c0591194006b4d3e531f720186102492872cb32ddb9363919cf"}, - {file = "duckdb-0.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:12803f9f41582b68921d6b21f95ba7a51e1d8f36832b7d8006186f58c3d1b344"}, - {file = "duckdb-0.8.1-cp310-cp310-win32.whl", hash = "sha256:d0953d5a2355ddc49095e7aef1392b7f59c5be5cec8cdc98b9d9dc1f01e7ce2b"}, - {file = "duckdb-0.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e6583c98a7d6637e83bcadfbd86e1f183917ea539f23b6b41178f32f813a5eb"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fad7ed0d4415f633d955ac24717fa13a500012b600751d4edb050b75fb940c25"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81ae602f34d38d9c48dd60f94b89f28df3ef346830978441b83c5b4eae131d08"}, - {file = "duckdb-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d75cfe563aaa058d3b4ccaaa371c6271e00e3070df5de72361fd161b2fe6780"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbb55e7a3336f2462e5e916fc128c47fe1c03b6208d6bd413ac11ed95132aa0"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6df53efd63b6fdf04657385a791a4e3c4fb94bfd5db181c4843e2c46b04fef5"}, - {file = "duckdb-0.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b188b80b70d1159b17c9baaf541c1799c1ce8b2af4add179a9eed8e2616be96"}, - {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ad481ee353f31250b45d64b4a104e53b21415577943aa8f84d0af266dc9af85"}, - {file = "duckdb-0.8.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1d1b1729993611b1892509d21c21628917625cdbe824a61ce891baadf684b32"}, - {file = "duckdb-0.8.1-cp311-cp311-win32.whl", hash = "sha256:2d8f9cc301e8455a4f89aa1088b8a2d628f0c1f158d4cf9bc78971ed88d82eea"}, - {file = 
"duckdb-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:07457a43605223f62d93d2a5a66b3f97731f79bbbe81fdd5b79954306122f612"}, - {file = "duckdb-0.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2c8062c3e978dbcd80d712ca3e307de8a06bd4f343aa457d7dd7294692a3842"}, - {file = "duckdb-0.8.1-cp36-cp36m-win32.whl", hash = "sha256:fad486c65ae944eae2de0d590a0a4fb91a9893df98411d66cab03359f9cba39b"}, - {file = "duckdb-0.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:86fa4506622c52d2df93089c8e7075f1c4d0ba56f4bf27faebde8725355edf32"}, - {file = "duckdb-0.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:60e07a62782f88420046e30cc0e3de842d0901c4fd5b8e4d28b73826ec0c3f5e"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18563675977f8cbf03748efee0165b4c8ef64e0cbe48366f78e2914d82138bb"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16e179443832bea8439ae4dff93cf1e42c545144ead7a4ef5f473e373eea925a"}, - {file = "duckdb-0.8.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a413d5267cb41a1afe69d30dd6d4842c588256a6fed7554c7e07dad251ede095"}, - {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3784680df59eadd683b0a4c2375d451a64470ca54bd171c01e36951962b1d332"}, - {file = "duckdb-0.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:67a1725c2b01f9b53571ecf3f92959b652f60156c1c48fb35798302e39b3c1a2"}, - {file = "duckdb-0.8.1-cp37-cp37m-win32.whl", hash = "sha256:197d37e2588c5ad063e79819054eedb7550d43bf1a557d03ba8f8f67f71acc42"}, - {file = "duckdb-0.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:3843feb79edf100800f5037c32d5d5a5474fb94b32ace66c707b96605e7c16b2"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:624c889b0f2d656794757b3cc4fc58030d5e285f5ad2ef9fba1ea34a01dab7fb"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:fcbe3742d77eb5add2d617d487266d825e663270ef90253366137a47eaab9448"}, - {file = "duckdb-0.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47516c9299d09e9dbba097b9fb339b389313c4941da5c54109df01df0f05e78c"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf1ba718b7522d34399446ebd5d4b9fcac0b56b6ac07bfebf618fd190ec37c1d"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e36e35d38a9ae798fe8cf6a839e81494d5b634af89f4ec9483f4d0a313fc6bdb"}, - {file = "duckdb-0.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23493313f88ce6e708a512daacad13e83e6d1ea0be204b175df1348f7fc78671"}, - {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1fb9bf0b6f63616c8a4b9a6a32789045e98c108df100e6bac783dc1e36073737"}, - {file = "duckdb-0.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:12fc13ecd5eddd28b203b9e3999040d3a7374a8f4b833b04bd26b8c5685c2635"}, - {file = "duckdb-0.8.1-cp38-cp38-win32.whl", hash = "sha256:a12bf4b18306c9cb2c9ba50520317e6cf2de861f121d6f0678505fa83468c627"}, - {file = "duckdb-0.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e4e809358b9559c00caac4233e0e2014f3f55cd753a31c4bcbbd1b55ad0d35e4"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7acedfc00d97fbdb8c3d120418c41ef3cb86ef59367f3a9a30dff24470d38680"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:99bfe264059cdc1e318769103f656f98e819cd4e231cd76c1d1a0327f3e5cef8"}, - {file = "duckdb-0.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:538b225f361066231bc6cd66c04a5561de3eea56115a5dd773e99e5d47eb1b89"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae0be3f71a18cd8492d05d0fc1bc67d01d5a9457b04822d025b0fc8ee6efe32e"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cd82ba63b58672e46c8ec60bc9946aa4dd7b77f21c1ba09633d8847ad9eb0d7b"}, - {file = "duckdb-0.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:780a34559aaec8354e83aa4b7b31b3555f1b2cf75728bf5ce11b89a950f5cdd9"}, - {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:01f0d4e9f7103523672bda8d3f77f440b3e0155dd3b2f24997bc0c77f8deb460"}, - {file = "duckdb-0.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:31f692decb98c2d57891da27180201d9e93bb470a3051fcf413e8da65bca37a5"}, - {file = "duckdb-0.8.1-cp39-cp39-win32.whl", hash = "sha256:e7fe93449cd309bbc67d1bf6f6392a6118e94a9a4479ab8a80518742e855370a"}, - {file = "duckdb-0.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:81d670bc6807672f038332d9bf587037aabdd741b0810de191984325ed307abd"}, - {file = "duckdb-0.8.1.tar.gz", hash = "sha256:a54d37f4abc2afc4f92314aaa56ecf215a411f40af4bffe1e86bd25e62aceee9"}, + {file = "duckdb-0.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ffb28663810679c77c7a4d6e799a45991b13af413fc9e012e65221e48ef58a8d"}, + {file = "duckdb-0.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8770917c52007bd4873aa81a9fab13e015be45cabd2f32092e4cd04bbd043da7"}, + {file = "duckdb-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7d26ac0d59e1e1c9c0faaafa94487016abde4f431b45e3da7a4c37f2ffbcecae"}, + {file = "duckdb-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f6d982cf28d8257dedf371c85d00523114f600018f4e013b01d2072f8f7b9"}, + {file = "duckdb-0.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c144c18da348af5201d9526556d3f3bddbf4882a71106a4ced9acca0ea579938"}, + {file = "duckdb-0.9.0-cp310-cp310-win32.whl", hash = "sha256:ed4fff7a46a0fb3780bde1dc9ce07f2e2947f625e3f26d41771b5d54150f0a3f"}, + {file = "duckdb-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:577e4d5d356634ebd4a02a5e0a1c8e980d6e5dc032f958cc310f68686840745d"}, + {file = "duckdb-0.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:b8900a9b2dc5eff4849025755801be156cfb16326ff8d0bc5f3787321f1553be"}, + {file = "duckdb-0.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2bad2145350a81f32852e09f00cea43533cce2dd41e974792f60ce24ecdae30"}, + {file = "duckdb-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:139c0af678dd7ca4a37c744db67a32dd65ee62424dcb09f4ac6c839115ec67a4"}, + {file = "duckdb-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15dc13cac315741c7fa97907ed8b999b31c10fb3347a142d367c8ce0ed8be63e"}, + {file = "duckdb-0.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d5d8b3e6cad93ab9ac92ce1a2db7ea4836cc2a310efc8e4d8d79648050e2804"}, + {file = "duckdb-0.9.0-cp311-cp311-win32.whl", hash = "sha256:c3b677cade05bf99a225c164ca46384d0182e7d09b47998174df5b4711717a62"}, + {file = "duckdb-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ed27ef0f1bc8b4f64bed124331d474b0f3106de91fd086e00fa98c670df488"}, + {file = "duckdb-0.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1407e2e75c10cd9988bae7ad0450208223660bc821d56e06114d5cb8f5358580"}, + {file = "duckdb-0.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2388e9dcc38c08efef8d74e5f63db691329dc9ed5c68b7a6619aae9678f10ebc"}, + {file = "duckdb-0.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:347b7aa85aef8cfd0a53143aded0e121aed51cf74713b264ade22fedd3af2bd8"}, + {file = "duckdb-0.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e4636a4d14590719bf67c317b3df04ef3269f68829340457ce0c47e8eb759d39"}, + {file = "duckdb-0.9.0-cp37-cp37m-win32.whl", hash = "sha256:745138da6645286e098dff3be55fbd64eba8d13ba3cfb823fb0d5c0e4d5f646c"}, + {file = "duckdb-0.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ac10228540cacbbb9483842d429212c7e61fc02bd94d07ab6eee69beb4e1f5d2"}, + {file = "duckdb-0.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0595110431d7f53663ed2b9d25ca393feefa3a096fbaf33af8f500d7d7a40bd9"}, + {file = 
"duckdb-0.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ab4a6bc961944ad8b23a477c322b9be5f3846ef52a6ff096a0076989a4a841a1"}, + {file = "duckdb-0.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf33ea1a3882d83d744282e2b0db79fe274d64ebc2beff31e42d13f5d2093f22"}, + {file = "duckdb-0.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0af5cf448bca9b5acb8a5d2a451739fc5545bca3b5222db73a8faade6ff499be"}, + {file = "duckdb-0.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424fd1e3dde074660865557f2138b2f988b9e499bc1eaaf391a1912d19e909dd"}, + {file = "duckdb-0.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72cfa8e1de0b29719fa81c7afe4bb0b3011cb4d9c5c2ecf18dce14912d6a4386"}, + {file = "duckdb-0.9.0-cp38-cp38-win32.whl", hash = "sha256:36cb92ccf75d7c18d533ab959d5e9febf1d0977647eba8b85f54d60edd4ceafd"}, + {file = "duckdb-0.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:4134aad5ed0399d621180a2a09d96b7dd94b18c01682da14e41930ef16e50c78"}, + {file = "duckdb-0.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0082b96c4141f87dc6440e76ef13e39f191a131534ffafadd6cf279766f1ecd"}, + {file = "duckdb-0.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e4302fb0be33cdbff9404656be0b45bba6e549a67180f03c262bfcb1e41288db"}, + {file = "duckdb-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a115a430c607a3287f4af26d18820a9eab56925703142b727bc3f7204b50aa3f"}, + {file = "duckdb-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5615e7854f042f1c7a5c4c3959de64861677b63029b08ed72c94abffcdc70483"}, + {file = "duckdb-0.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8784986a04bac9873fa538775c1fb5b677baad90ddc9371f7b4a89468b1c1ab6"}, + {file = "duckdb-0.9.0-cp39-cp39-win32.whl", hash = "sha256:584b1b63d74076c3dd4188582c2f59ac98177fe751e1fa34d75345e166e70d73"}, + {file = "duckdb-0.9.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:40814f12c4c1ec222c8df05763528dbbffd175c8bcc5e3c45c3950c6a9b3f209"}, + {file = "duckdb-0.9.0.tar.gz", hash = "sha256:3a52c975cc13b965580cd00af1538b2d9f6b896431f97121dbee7ce715edcd2a"}, ] [[package]] @@ -1246,16 +1303,19 @@ gmpy = ["gmpy"] gmpy2 = ["gmpy2"] [[package]] -name = "et-xmlfile" -version = "1.1.0" -description = "An implementation of lxml.xmlfile for the standard library" +name = "emoji" +version = "2.8.0" +description = "Emoji for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ - {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, - {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, + {file = "emoji-2.8.0-py2.py3-none-any.whl", hash = "sha256:a8468fd836b7ecb6d1eac054c9a591701ce0ccd6c6f7779ad71b66f76664df90"}, + {file = "emoji-2.8.0.tar.gz", hash = "sha256:8d8b5dec3c507444b58890e598fc895fcec022b3f5acb49497c6ccc5208b8b00"}, ] +[package.extras] +dev = ["coverage", "coveralls", "pytest"] + [[package]] name = "exceptiongroup" version = "1.1.3" @@ -1270,6 +1330,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "execnet" +version = "2.0.2" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.7" +files = [ + {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"}, + {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + [[package]] name = "executing" version = "1.2.0" @@ -1393,18 +1467,19 @@ zstandard = ["zstandard"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.12.4" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, + {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] +typing = ["typing-extensions (>=4.7.1)"] [[package]] name = "filetype" @@ -1417,6 +1492,56 @@ files = [ {file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"}, ] +[[package]] +name = "flask" +version = "2.3.3" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, + {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=2.3.7" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-basicauth" +version = "0.2.0" +description = "HTTP basic access authentication for Flask." +optional = false +python-versions = "*" +files = [ + {file = "Flask-BasicAuth-0.2.0.tar.gz", hash = "sha256:df5ebd489dc0914c224419da059d991eb72988a01cdd4b956d52932ce7d501ff"}, +] + +[package.dependencies] +Flask = "*" + +[[package]] +name = "flask-cors" +version = "4.0.0" +description = "A Flask extension adding a decorator for CORS support" +optional = false +python-versions = "*" +files = [ + {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, + {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, +] + +[package.dependencies] +Flask = ">=0.9" + [[package]] name = "flatbuffers" version = "23.5.26" @@ -1428,6 +1553,24 @@ files = [ {file = "flatbuffers-23.5.26.tar.gz", hash = "sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89"}, ] +[[package]] +name = "flower" +version = "2.0.1" +description = "Celery Flower" +optional = true +python-versions = ">=3.7" +files = [ + {file = "flower-2.0.1-py2.py3-none-any.whl", hash = "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2"}, + {file = "flower-2.0.1.tar.gz", hash = 
"sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0"}, +] + +[package.dependencies] +celery = ">=5.0.5" +humanize = "*" +prometheus-client = ">=0.8.0" +pytz = "*" +tornado = ">=5.0.0,<7.0.0" + [[package]] name = "frozenlist" version = "1.4.0" @@ -1500,13 +1643,13 @@ files = [ [[package]] name = "fsspec" -version = "2023.6.0" +version = "2023.9.2" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"}, - {file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"}, + {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, + {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, ] [package.extras] @@ -1535,13 +1678,13 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.11.1" +version = "2.12.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, + {file = "google-api-core-2.12.0.tar.gz", hash = "sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553"}, + {file = "google_api_core-2.12.0-py3-none-any.whl", hash = "sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160"}, ] [package.dependencies] @@ -1559,13 +1702,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.97.0" +version = "2.101.0" description = "Google API Client Library for Python" optional = false python-versions = 
">=3.7" files = [ - {file = "google-api-python-client-2.97.0.tar.gz", hash = "sha256:48277291894876a1ca7ed4127e055e81f81e6343ced1b544a7200ae2c119dcd7"}, - {file = "google_api_python_client-2.97.0-py2.py3-none-any.whl", hash = "sha256:5215f4cd577753fc4192ccfbe0bb8b55d4bb5fd68fa6268ac5cf271b6305de31"}, + {file = "google-api-python-client-2.101.0.tar.gz", hash = "sha256:e9620a809251174818e1fce16604006f10a9c2ac0d3d94a139cdddcd4dbea2d8"}, + {file = "google_api_python_client-2.101.0-py2.py3-none-any.whl", hash = "sha256:71760dcf11d191b65520d1c13757a776f4f43cf87f302097a0d8e491c2ef87b0"}, ] [package.dependencies] @@ -1577,20 +1720,19 @@ uritemplate = ">=3.0.1,<5" [[package]] name = "google-auth" -version = "2.22.0" +version = "2.23.0" description = "Google Authentication Library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = "google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"}, + {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -six = ">=1.9.0" urllib3 = "<2.0" [package.extras] @@ -1602,29 +1744,28 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "google-auth-httplib2" -version = "0.1.0" +version = "0.1.1" description = "Google Authentication Library: httplib2 transport" optional = false python-versions = "*" files = [ - {file = "google-auth-httplib2-0.1.0.tar.gz", hash = "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac"}, - {file = "google_auth_httplib2-0.1.0-py2.py3-none-any.whl", hash = 
"sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10"}, + {file = "google-auth-httplib2-0.1.1.tar.gz", hash = "sha256:c64bc555fdc6dd788ea62ecf7bccffcf497bf77244887a3f3d7a5a02f8e3fc29"}, + {file = "google_auth_httplib2-0.1.1-py2.py3-none-any.whl", hash = "sha256:42c50900b8e4dcdf8222364d1f0efe32b8421fb6ed72f2613f12f75cc933478c"}, ] [package.dependencies] google-auth = "*" -httplib2 = ">=0.15.0" -six = "*" +httplib2 = ">=0.19.0" [[package]] name = "google-cloud-aiplatform" -version = "1.30.1" +version = "1.33.1" description = "Vertex AI API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-aiplatform-1.30.1.tar.gz", hash = "sha256:7552a6b2e66d7a9ff3c4b2bb95b0e9c182e7475dfb35d6347e9299f78779135a"}, - {file = "google_cloud_aiplatform-1.30.1-py2.py3-none-any.whl", hash = "sha256:ab1bbd4cf83cf583b7dea7e53421ad076f18b63e93cb22fb53c03176d5aa9258"}, + {file = "google-cloud-aiplatform-1.33.1.tar.gz", hash = "sha256:f73b564232cc0c7c6e8c60352b55b7b6c47f6a8a2d97b7c821c45cfe77f24208"}, + {file = "google_cloud_aiplatform-1.33.1-py2.py3-none-any.whl", hash = "sha256:609224406596ef67cc376042aeca51b792c23c16f9997a42e9da5b9be04c3ada"}, ] [package.dependencies] @@ -1640,17 +1781,19 @@ shapely = "<2.0.0" [package.extras] autologging = ["mlflow (>=1.27.0,<=2.1.1)"] cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] -datasets = ["pyarrow (>=3.0.0,<8.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)"] endpoint = ["requests (>=2.28.1)"] -full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.76.0)", "google-cloud-bigquery-storage", "google-vizier (==0.0.4)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3,<7)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", 
"urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.4)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "importlib-metadata (<7.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] -pipelines = ["pyyaml (>=5.3,<7)"] -prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<0.76.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +pipelines = ["pyyaml (==5.3.1)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<0.103.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)", "importlib-metadata (<7.0)"] private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)"] tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"] -testing = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.76.0)", "google-cloud-bigquery-storage", "google-vizier (==0.0.4)", "grpcio-testing", "ipython", "kfp", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow 
(>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3,<7)", "requests (>=2.28.1)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] -vizier = ["google-vizier (==0.0.4)"] +testing = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.4)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "importlib-metadata (<7.0)", "ipython", "kfp", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<2.13.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch", "torch (>=2.0.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] +vizier = ["google-vizier (==0.0.4)", "google-vizier (>=0.1.6)"] xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] [[package]] @@ -1705,13 +1848,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] [[package]] name = "google-cloud-resource-manager" -version = "1.10.3" +version = "1.10.4" description = "Google Cloud Resource Manager API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-resource-manager-1.10.3.tar.gz", hash = "sha256:f80efcea36f10c5a81889afe93910926e3978b4b1ceeb82f563a2fc863072d14"}, - {file = 
"google_cloud_resource_manager-1.10.3-py2.py3-none-any.whl", hash = "sha256:1381a4b0f522248ebe0ebd1289d8822b99c54f4e1fe03924a6e723b2ed93dd7f"}, + {file = "google-cloud-resource-manager-1.10.4.tar.gz", hash = "sha256:456b25ddda3d4cd27488a72736bbc3af04d713ae2fe3655c01b66a339d28d679"}, + {file = "google_cloud_resource_manager-1.10.4-py2.py3-none-any.whl", hash = "sha256:2ba56ba8e5280cd425bd63620da48b78da2cd299ece58a71f0f2ce3a32d56f99"}, ] [package.dependencies] @@ -1722,20 +1865,20 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 [[package]] name = "google-cloud-storage" -version = "2.10.0" +version = "2.11.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.10.0.tar.gz", hash = "sha256:934b31ead5f3994e5360f9ff5750982c5b6b11604dc072bc452c25965e076dc7"}, - {file = "google_cloud_storage-2.10.0-py2.py3-none-any.whl", hash = "sha256:9433cf28801671de1c80434238fb1e7e4a1ba3087470e90f70c928ea77c2b9d7"}, + {file = "google-cloud-storage-2.11.0.tar.gz", hash = "sha256:6fbf62659b83c8f3a0a743af0d661d2046c97c3a5bfb587c4662c4bc68de3e31"}, + {file = "google_cloud_storage-2.11.0-py2.py3-none-any.whl", hash = "sha256:88cbd7fb3d701c780c4272bc26952db99f25eb283fb4c2208423249f00b5fe53"}, ] [package.dependencies] google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" -google-resumable-media = ">=2.3.2" +google-resumable-media = ">=2.6.0" requests = ">=2.18.0,<3.0.0dev" [package.extras] @@ -1823,20 +1966,20 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.5.0" +version = "2.6.0" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false python-versions = ">= 3.7" files = [ - {file = "google-resumable-media-2.5.0.tar.gz", hash = "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93"}, - {file = 
"google_resumable_media-2.5.0-py2.py3-none-any.whl", hash = "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec"}, + {file = "google-resumable-media-2.6.0.tar.gz", hash = "sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7"}, + {file = "google_resumable_media-2.6.0-py2.py3-none-any.whl", hash = "sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b"}, ] [package.dependencies] google-crc32c = ">=1.0,<2.0dev" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] @@ -2365,6 +2508,20 @@ files = [ [package.dependencies] pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_version >= \"3.8\""} +[[package]] +name = "humanize" +version = "4.8.0" +description = "Python humanize utilities" +optional = true +python-versions = ">=3.8" +files = [ + {file = "humanize-4.8.0-py3-none-any.whl", hash = "sha256:8bc9e2bb9315e61ec06bf690151ae35aeb65651ab091266941edf97c90836404"}, + {file = "humanize-4.8.0.tar.gz", hash = "sha256:9783373bf1eec713a770ecaa7c2d7a7902c98398009dfa3d8a2df91eec9311e8"}, +] + +[package.extras] +tests = ["freezegun", "pytest", "pytest-cov"] + [[package]] name = "hyperframe" version = "6.0.1" @@ -2408,21 +2565,21 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "6.0.1" +version = "6.1.0" description = "Read resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.0.1-py3-none-any.whl", hash = "sha256:134832a506243891221b88b4ae1213327eea96ceb4e407a00d790bb0626f45cf"}, - {file = "importlib_resources-6.0.1.tar.gz", hash = "sha256:4359457e42708462b9626a04657c6208ad799ceb41e5c58c57ffa0e6a098a5d4"}, + {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = 
"sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, + {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -2437,13 +2594,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.25.1" +version = "6.25.2" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.25.1-py3-none-any.whl", hash = "sha256:c8a2430b357073b37c76c21c52184db42f6b4b0e438e1eb7df3c4440d120497c"}, - {file = "ipykernel-6.25.1.tar.gz", hash = "sha256:050391364c0977e768e354bdb60cbbfbee7cbb943b1af1618382021136ffd42f"}, + {file = "ipykernel-6.25.2-py3-none-any.whl", hash = "sha256:2e2ee359baba19f10251b99415bb39de1e97d04e1fab385646f24f0596510b77"}, + {file = "ipykernel-6.25.2.tar.gz", hash = "sha256:f468ddd1f17acb48c8ce67fcfa49ba6d46d4f9ac0438c1f441be7c3d1372230b"}, ] [package.dependencies] @@ -2470,13 +2627,13 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio" [[package]] name = "ipython" -version = "8.14.0" +version = "8.15.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" files = [ - {file = 
"ipython-8.14.0-py3-none-any.whl", hash = "sha256:248aca623f5c99a6635bc3857677b7320b9b8039f99f070ee0d20a5ca5a8e6bf"}, - {file = "ipython-8.14.0.tar.gz", hash = "sha256:1d197b907b6ba441b692c48cf2a3a2de280dc0ac91a3405b39349a50272ca0a1"}, + {file = "ipython-8.15.0-py3-none-any.whl", hash = "sha256:45a2c3a529296870a97b7de34eda4a31bee16bc7bf954e07d39abe49caf8f887"}, + {file = "ipython-8.15.0.tar.gz", hash = "sha256:2baeb5be6949eeebf532150f81746f8333e2ccce02de1c7eedde3f23ed5e9f1e"}, ] [package.dependencies] @@ -2484,6 +2641,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} @@ -2495,9 +2653,9 @@ traitlets = ">=5" typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", 
"exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -2694,7 +2852,7 @@ full = ["aiohttp", "black (==22.3.0)", "docker", "filelock", "flake8 (==4.0.1)", name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, @@ -2720,13 +2878,13 @@ files = [ [[package]] name = "jupyter-client" -version = "8.3.0" +version = "8.3.1" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" files = [ - {file = "jupyter_client-8.3.0-py3-none-any.whl", hash = "sha256:7441af0c0672edc5d28035e92ba5e32fadcfa8a4e608a434c228836a89df6158"}, - {file = "jupyter_client-8.3.0.tar.gz", hash = "sha256:3af69921fe99617be1670399a0b857ad67275eefcfa291e2c81a160b7b650f5f"}, + {file = "jupyter_client-8.3.1-py3-none-any.whl", hash = "sha256:5eb9f55eb0650e81de6b7e34308d8b92d04fe4ec41cd8193a913979e33d8e1a5"}, + {file = "jupyter_client-8.3.1.tar.gz", hash = "sha256:60294b2d5b869356c893f57b1a877ea6510d60d45cf4b38057f1672d85699ac9"}, ] [package.dependencies] @@ -2814,29 +2972,24 @@ files = [ langchain = ">=0.0.239" [[package]] -name = "langchain-serve" -version = "0.0.60" -description = "Langchain Serve - serve your langchain apps on Jina AI Cloud." 
-optional = true -python-versions = "*" +name = "langfuse" +version = "1.0.25" +description = "A client library for accessing langfuse" +optional = false +python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-serve-0.0.60.tar.gz", hash = "sha256:bfcc2e7c2a3cd3b4cde5ff45043cc9c8d437704941b02d166185d8334a120561"}, + {file = "langfuse-1.0.25-py3-none-any.whl", hash = "sha256:7b724eba534bacfb5e7225ba9ed2be3701f893a26a8be171cb9ad8b041139df5"}, + {file = "langfuse-1.0.25.tar.gz", hash = "sha256:4a6ee9958e51f86ffa056f475e03413b2befdde366c4523007ec85863bb91c70"}, ] [package.dependencies] -click = "*" -jcloud = ">=0.2.16" -jina = "3.15.2" -jina-hubble-sdk = "*" -langchain = "*" -nest-asyncio = "*" -requests = "*" -slack_bolt = "*" -textual = "*" -toml = "*" - -[package.extras] -test = ["psutil", "pytest", "pytest-asyncio"] +attrs = ">=21.3.0" +backoff = ">=2.2.1,<3.0.0" +httpx = ">=0.15.4,<0.25.0" +langchain = ">=0.0.237,<1.0" +pydantic = ">=1.10.7,<2.0" +python-dateutil = ">=2.8.0,<3.0" +pytz = ">=2023.3,<2024.0" [[package]] name = "langsmith" @@ -2853,26 +3006,6 @@ files = [ pydantic = ">=1,<3" requests = ">=2,<3" -[[package]] -name = "linkify-it-py" -version = "2.0.2" -description = "Links recognition library with FULL unicode support." 
-optional = true -python-versions = ">=3.7" -files = [ - {file = "linkify-it-py-2.0.2.tar.gz", hash = "sha256:19f3060727842c254c808e99d465c80c49d2c7306788140987a1a7a29b0d6ad2"}, - {file = "linkify_it_py-2.0.2-py3-none-any.whl", hash = "sha256:a3a24428f6c96f27370d7fe61d2ac0be09017be5190d68d8658233171f1b6541"}, -] - -[package.dependencies] -uc-micro-py = "*" - -[package.extras] -benchmark = ["pytest", "pytest-benchmark"] -dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"] -doc = ["myst-parser", "sphinx", "sphinx-book-theme"] -test = ["coverage", "pytest", "pytest-cov"] - [[package]] name = "llama-cpp-python" version = "0.1.78" @@ -2891,15 +3024,42 @@ typing-extensions = ">=4.7.1,<5.0.0" [package.extras] server = ["fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "uvicorn (>=0.23.2,<0.24.0)"] +[[package]] +name = "locust" +version = "2.16.1" +description = "Developer friendly load testing framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "locust-2.16.1-py3-none-any.whl", hash = "sha256:d0f01f9fca6a7d9be987b32185799d9e219fce3b9a3b8250ea03e88003335804"}, + {file = "locust-2.16.1.tar.gz", hash = "sha256:cd54f179b679ae927e9b3ffd2b6a7c89c1078103cfbe96b4dd53c7872774b619"}, +] + +[package.dependencies] +ConfigArgParse = ">=1.0" +flask = ">=2.0.0" +Flask-BasicAuth = ">=0.2.0" +Flask-Cors = ">=3.0.10" +gevent = ">=20.12.1" +geventhttpclient = ">=2.0.2" +msgpack = ">=0.6.2" +psutil = ">=5.6.7" +pywin32 = {version = "*", markers = "platform_system == \"Windows\""} +pyzmq = ">=22.2.1,<23.0.0 || >23.0.0" +requests = ">=2.23.0" +roundrobin = ">=0.0.2" +typing-extensions = ">=3.7.4.3" +Werkzeug = ">=2.0.0" + [[package]] name = "loguru" -version = "0.7.0" +version = "0.7.2" description = "Python logging made (stupidly) simple" optional = false python-versions = ">=3.5" files = [ - {file = "loguru-0.7.0-py3-none-any.whl", hash = "sha256:b93aa30099fa6860d4727f1b81f8718e965bb96253fa190fab2077aaad6d15d3"}, - 
{file = "loguru-0.7.0.tar.gz", hash = "sha256:1612053ced6ae84d7959dd7d5e431a0532642237ec21f7fd83ac73fe539e03e1"}, + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [package.dependencies] @@ -2907,7 +3067,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] [[package]] name = "lxml" @@ -3073,24 +3233,6 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] -[[package]] -name = "markdown" -version = "3.4.4" -description = "Python implementation of John Gruber's Markdown." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, - {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] -testing = ["coverage", "pyyaml"] - [[package]] name = "markdown-it-py" version = "3.0.0" @@ -3103,8 +3245,6 @@ files = [ ] [package.dependencies] -linkify-it-py = {version = ">=1,<3", optional = true, markers = "extra == \"linkify\""} -mdit-py-plugins = {version = "*", optional = true, markers = "extra == \"plugins\""} mdurl = ">=0.1,<1.0" [package.extras] @@ -3210,25 +3350,6 @@ files = [ [package.dependencies] traitlets = "*" -[[package]] -name = "mdit-py-plugins" -version = "0.4.0" -description = "Collection of plugins for markdown-it-py" -optional = true -python-versions = ">=3.8" -files = [ - {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, - {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, -] - -[package.dependencies] -markdown-it-py = ">=1.0.0,<4.0.0" - -[package.extras] -code-style = ["pre-commit"] -rtd = ["myst-parser", "sphinx-book-theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - [[package]] name = "mdurl" version = "0.1.2" @@ -3240,15 +3361,30 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "metal-sdk" +version = "2.1.4" +description = "SDK for getmetal.io" +optional = false +python-versions = ">=3.7" +files = [ + {file = "metal_sdk-2.1.4-py3-none-any.whl", hash = 
"sha256:b6f23b5aa5382d8a49eb20bc95992c2c796f4d3056dbcda5259afd86f301d853"}, + {file = "metal_sdk-2.1.4.tar.gz", hash = "sha256:9d6060ab1e9e8605547281396b3254567ffe6ae56a6855ede1dd6fe1de1326a2"}, +] + +[package.dependencies] +httpx = "*" +typing-extensions = "*" + [[package]] name = "metaphor-python" -version = "0.1.14" +version = "0.1.16" description = "A Python package for the Metaphor API." optional = false python-versions = "*" files = [ - {file = "metaphor-python-0.1.14.tar.gz", hash = "sha256:9c9be35d1270e1e2984637711c15b614ad852fbd2b51d428b4cc478e95018fab"}, - {file = "metaphor_python-0.1.14-py3-none-any.whl", hash = "sha256:9435fb702d62032e4affbf7825fe4b1f0c6099d33b4867f4238c07f4515eb9a2"}, + {file = "metaphor-python-0.1.16.tar.gz", hash = "sha256:c26c3e8a37ef1b195073d556c929180c2b1acf7590a801da8d1b9afbdd82dd8f"}, + {file = "metaphor_python-0.1.16-py3-none-any.whl", hash = "sha256:f3989a679f888cc0374593ab5e5a82b0f121c90b14d6eb5b813e861990dbb13b"}, ] [package.dependencies] @@ -3283,22 +3419,70 @@ gmpy = ["gmpy2 (>=2.1.0a4)"] tests = ["pytest (>=4.6)"] [[package]] -name = "msg-parser" -version = "1.2.0" -description = "This module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files." 
+name = "msgpack" +version = "1.0.6" +description = "MessagePack serializer" optional = false -python-versions = ">=3.4" +python-versions = ">=3.8" files = [ - {file = "msg_parser-1.2.0-py2.py3-none-any.whl", hash = "sha256:d47a2f0b2a359cb189fad83cc991b63ea781ecc70d91410324273fbf93e95375"}, - {file = "msg_parser-1.2.0.tar.gz", hash = "sha256:0de858d4fcebb6c8f6f028da83a17a20fe01cdce67c490779cf43b3b0162aa66"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c"}, + {file = "msgpack-1.0.6-cp310-cp310-win32.whl", hash = "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4"}, + {file = 
"msgpack-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7"}, + {file = "msgpack-1.0.6-cp311-cp311-win32.whl", hash = "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887"}, + {file = "msgpack-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823"}, + {file = 
"msgpack-1.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077"}, + {file = "msgpack-1.0.6-cp312-cp312-win32.whl", hash = "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5"}, + {file = "msgpack-1.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76"}, + {file = 
"msgpack-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2"}, + {file = "msgpack-1.0.6-cp38-cp38-win32.whl", hash = "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8"}, + {file = "msgpack-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3"}, + {file = 
"msgpack-1.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8"}, + {file = "msgpack-1.0.6-cp39-cp39-win32.whl", hash = "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e"}, + {file = "msgpack-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb"}, + {file = "msgpack-1.0.6.tar.gz", hash = "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e"}, ] -[package.dependencies] -olefile = ">=0.46" - -[package.extras] -rtf = ["compressed-rtf (>=1.0.5)"] - [[package]] name = "multidict" version = "6.0.4" @@ -3469,13 +3653,13 @@ files = [ [[package]] name = "nest-asyncio" -version = "1.5.7" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, - {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] @@ -3496,6 +3680,31 @@ doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx- 
extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] +[[package]] +name = "nh3" +version = "0.2.14" +description = "Ammonia HTML sanitizer Python binding" +optional = false +python-versions = "*" +files = [ + {file = "nh3-0.2.14-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a"}, + {file = "nh3-0.2.14-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525"}, + {file = "nh3-0.2.14-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_armv7l.whl", hash = 
"sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d"}, + {file = "nh3-0.2.14-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6"}, + {file = "nh3-0.2.14-cp37-abi3-win32.whl", hash = "sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873"}, + {file = "nh3-0.2.14-cp37-abi3-win_amd64.whl", hash = "sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e"}, + {file = "nh3-0.2.14.tar.gz", hash = "sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4"}, +] + [[package]] name = "nltk" version = "3.8.1" @@ -3523,41 +3732,40 @@ twitter = ["twython"] [[package]] name = "numexpr" -version = "2.8.5" +version = "2.8.7" description = "Fast numerical expression evaluator for NumPy" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "numexpr-2.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51f3ab160c3847ebcca93cd88f935a7802b54a01ab63fe93152994a64d7a6cf2"}, - {file = "numexpr-2.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:de29c77f674e4eb8f0846525a475cab64008c227c8bc4ba5153ab3f72441cc63"}, - {file = "numexpr-2.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf85ba1327eb87ec82ae7936f13c8850fb969a0ca34f3ba9fa3897c09d5c80d7"}, - {file = "numexpr-2.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c00be69f747f44a631830215cab482f0f77f75af2925695adff57c1cc0f9a68"}, - {file = "numexpr-2.8.5-cp310-cp310-win32.whl", hash = "sha256:c46350dcdb93e32f033eea5a21269514ffcaf501d9abd6036992d37e48a308b0"}, - {file = "numexpr-2.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:894b027438b8ec88dea32a19193716c79f4ff8ddb92302dcc9731b51ba3565a8"}, - {file = "numexpr-2.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:6df184d40d4cf9f21c71f429962f39332f7398147762588c9f3a5c77065d0c06"}, - {file = "numexpr-2.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:178b85ad373c6903e55d75787d61b92380439b70d94b001cb055a501b0821335"}, - {file = "numexpr-2.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:578fe4008e4d5d6ff01bbeb2d7b7ba1ec658a5cda9c720cd26a9a8325f8ef438"}, - {file = "numexpr-2.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef621b4ee366a5c6a484f6678c9259f5b826569f8bfa0b89ba2306d5055468bb"}, - {file = "numexpr-2.8.5-cp311-cp311-win32.whl", hash = "sha256:dd57ab1a3d3aaa9274aff1cefbf93b8ddacc7973afef5b125905f6bf18fabab0"}, - {file = "numexpr-2.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:783324ba40eb804ecfc9ebae86120a1e339ab112d0ab8a1f0d48a26354d5bf9b"}, - {file = "numexpr-2.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:183d5430db76826e54465c69db93a3c6ecbf03cda5aa1bb96eaad0147e9b68dc"}, - {file = "numexpr-2.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ce106f92ccea5b07b1d6f2f3c4370f05edf27691dc720a63903484a2137e48"}, - {file = "numexpr-2.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b594dc9e2d6291a0bc5c065e6d9caf3eee743b5663897832e9b17753c002947a"}, - {file = "numexpr-2.8.5-cp37-cp37m-win32.whl", hash = "sha256:62b4faf8e0627673b0210a837792bddd23050ecebc98069ab23eb0633ff1ef5f"}, - {file = "numexpr-2.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:db5c65417d69414f1ab31302ea01d3548303ef31209c38b4849d145be4e1d1ba"}, - {file = "numexpr-2.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb36ffcfa1606e41aa08d559b4277bcad0e16b83941d1a4fee8d2bd5a34f8e0e"}, - {file = "numexpr-2.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34af2a0e857d02a4bc5758bc037a777d50dacb13bcd57c7905268a3e44994ed6"}, - {file = "numexpr-2.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5a8dad2bfaad5a5c34a2e8bbf62b9df1dfab266d345fda1feb20ff4e264b347a"}, - {file = "numexpr-2.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93f5a866cd13a808bc3d3a9c487d94cd02eec408b275ff0aa150f2e8e5191f8"}, - {file = "numexpr-2.8.5-cp38-cp38-win32.whl", hash = "sha256:558390fea6370003ac749ed9d0f38d708aa096f5dcb707ddb6e0ca5a0dd37da1"}, - {file = "numexpr-2.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:55983806815035eb63c5039520688c49536bb7f3cc3fc1d7d64c6a00cf3f353e"}, - {file = "numexpr-2.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1510da20e6f5f45333610b1ded44c566e2690c6c437c84f2a212ca09627c7e01"}, - {file = "numexpr-2.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e8b5bf7bcb4e8dcd66522d8fc96e1db7278f901cb4fd2e155efbe62a41dde08"}, - {file = "numexpr-2.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ed0e1c1ef5f34381448539f1fe9015906d21c9cfa2797c06194d4207dadb465"}, - {file = "numexpr-2.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aea6ab45c87c0a7041183c08a798f0ad4d7c5eccbce20cfe79ce6f1a45ef3702"}, - {file = "numexpr-2.8.5-cp39-cp39-win32.whl", hash = "sha256:cbfd833ee5fdb0efb862e152aee7e6ccea9c596d5c11d22604c2e6307bff7cad"}, - {file = "numexpr-2.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:283ce8609a7ccbadf91a68f3484558b3e36d27c93c98a41ec205efb0ab43c872"}, - {file = "numexpr-2.8.5.tar.gz", hash = "sha256:45ed41e55a0abcecf3d711481e12a5fb7a904fe99d42bc282a17cc5f8ea510be"}, + {file = "numexpr-2.8.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d88531ffea3ea9287e8a1665c6a2d0206d3f4660d5244423e2a134a7f0ce5fba"}, + {file = "numexpr-2.8.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db1065ba663a854115cf1f493afd7206e2efcef6643129e8061e97a51ad66ebb"}, + {file = "numexpr-2.8.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4546416004ff2e7eb9cf52c2d7ab82732b1b505593193ee9f93fa770edc5230"}, + {file = 
"numexpr-2.8.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb2f473fdfd09d17db3038e34818d05b6bc561a36785aa927d6c0e06bccc9911"}, + {file = "numexpr-2.8.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5496fc9e3ae214637cbca1ab556b0e602bd3afe9ff4c943a29c482430972cda8"}, + {file = "numexpr-2.8.7-cp310-cp310-win32.whl", hash = "sha256:d43f1f0253a6f2db2f76214e6f7ae9611b422cba3f7d4c86415d7a78bbbd606f"}, + {file = "numexpr-2.8.7-cp310-cp310-win_amd64.whl", hash = "sha256:cf5f112bce5c5966c47cc33700bc14ce745c8351d437ed57a9574fff581f341a"}, + {file = "numexpr-2.8.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:32934d51b5bc8a6636436326da79ed380e2f151989968789cf65b1210572cb46"}, + {file = "numexpr-2.8.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f021ac93cb3dd5d8ba2882627b615b1f58cb089dcc85764c6fbe7a549ed21b0c"}, + {file = "numexpr-2.8.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dccf572763517db6562fb7b17db46aacbbf62a9ca0a66672872f4f71aee7b186"}, + {file = "numexpr-2.8.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11121b14ee3179bade92e823f25f1b94e18716d33845db5081973331188c3338"}, + {file = "numexpr-2.8.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:81451962d4145a46dba189df65df101d4d1caddb6efe6ebfe05982cd9f62b2cf"}, + {file = "numexpr-2.8.7-cp311-cp311-win32.whl", hash = "sha256:da55ba845b847cc33c4bf81cee4b1bddfb0831118cabff8db62888ab8697ec34"}, + {file = "numexpr-2.8.7-cp311-cp311-win_amd64.whl", hash = "sha256:fd93b88d5332069916fa00829ea1b972b7e73abcb1081eee5c905a514b8b59e3"}, + {file = "numexpr-2.8.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5340d2c86d83f52e1a3e7fd97c37d358ae99af9de316bdeeab2565b9b1e622ca"}, + {file = "numexpr-2.8.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3bdf8cbc00c77a46230c765d242f92d35905c239b20c256c48dbac91e49f253"}, + {file = "numexpr-2.8.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:d46c47e361fa60966a3339cb4f463ae6151ce7d78ed38075f06e8585d2c8929f"}, + {file = "numexpr-2.8.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a371cfc1670a18eea2d5c70abaa95a0e8824b70d28da884bad11931266e3a0ca"}, + {file = "numexpr-2.8.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:47a249cecd1382d482a5bf1fac0d11392fb2ed0f7d415ebc4cd901959deb1ec9"}, + {file = "numexpr-2.8.7-cp312-cp312-win32.whl", hash = "sha256:b8a5b2c21c26b62875bf819d375d798b96a32644e3c28bd4ce7789ed1fb489da"}, + {file = "numexpr-2.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:f29f4d08d9b0ed6fa5d32082971294b2f9131b8577c2b7c36432ed670924313f"}, + {file = "numexpr-2.8.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ecaa5be24cf8fa0f00108e9dfa1021b7510e9dd9d159b8d8bc7c7ddbb995b31"}, + {file = "numexpr-2.8.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a84284e0a407ca52980fd20962e89aff671c84cd6e73458f2e29ea2aa206356"}, + {file = "numexpr-2.8.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e838289e3b7bbe100b99e35496e6cc4cc0541c2207078941ee5a1d46e6b925ae"}, + {file = "numexpr-2.8.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0983052f308ea75dd232eb7f4729eed839db8fe8d82289940342b32cc55b15d0"}, + {file = "numexpr-2.8.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bf005acd7f1985c71b1b247aaac8950d6ea05a0fe0bbbbf3f96cd398b136daa"}, + {file = "numexpr-2.8.7-cp39-cp39-win32.whl", hash = "sha256:56ec95f8d1db0819e64987dcf1789acd500fa4ea396eeabe4af6efdcb8902d07"}, + {file = "numexpr-2.8.7-cp39-cp39-win_amd64.whl", hash = "sha256:c7bf60fc1a9c90a9cb21c4c235723e579bff70c8d5362228cb2cf34426104ba2"}, + {file = "numexpr-2.8.7.tar.gz", hash = "sha256:596eeb3bbfebc912f4b6eaaf842b61ba722cebdb8bc42dfefa657d3a74953849"}, ] [package.dependencies] @@ -3565,79 +3773,76 @@ numpy = ">=1.13.3" [[package]] name = "numpy" -version = "1.25.2" +version = "1.26.0" description = "Fundamental package for array computing 
in Python" optional = false -python-versions = ">=3.9" +python-versions = "<3.13,>=3.9" files = [ - {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, - {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, - {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, - {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, - {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, - {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, - {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = 
"sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, - {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, - {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, - {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, - {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, - {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, -] - -[[package]] -name = "olefile" -version = "0.46" -description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft 
Office)" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964"}, + {file = "numpy-1.26.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8db2f125746e44dce707dd44d4f4efeea8d7e2b43aace3f8d1f235cfa2733dd"}, + {file = "numpy-1.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0621f7daf973d34d18b4e4bafb210bbaf1ef5e0100b5fa750bd9cde84c7ac292"}, + {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51be5f8c349fdd1a5568e72713a21f518e7d6707bcf8503b528b88d33b57dc68"}, + {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:767254ad364991ccfc4d81b8152912e53e103ec192d1bb4ea6b1f5a7117040be"}, + {file = "numpy-1.26.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:436c8e9a4bdeeee84e3e59614d38c3dbd3235838a877af8c211cfcac8a80b8d3"}, + {file = "numpy-1.26.0-cp310-cp310-win32.whl", hash = "sha256:c2e698cb0c6dda9372ea98a0344245ee65bdc1c9dd939cceed6bb91256837896"}, + {file = "numpy-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:09aaee96c2cbdea95de76ecb8a586cb687d281c881f5f17bfc0fb7f5890f6b91"}, + {file = "numpy-1.26.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:637c58b468a69869258b8ae26f4a4c6ff8abffd4a8334c830ffb63e0feefe99a"}, + {file = "numpy-1.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:306545e234503a24fe9ae95ebf84d25cba1fdc27db971aa2d9f1ab6bba19a9dd"}, + {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6adc33561bd1d46f81131d5352348350fc23df4d742bb246cdfca606ea1208"}, + {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e062aa24638bb5018b7841977c360d2f5917268d125c833a686b7cbabbec496c"}, + {file = "numpy-1.26.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:546b7dd7e22f3c6861463bebb000646fa730e55df5ee4a0224408b5694cc6148"}, + {file = "numpy-1.26.0-cp311-cp311-win32.whl", hash = "sha256:c0b45c8b65b79337dee5134d038346d30e109e9e2e9d43464a2970e5c0e93229"}, + {file = "numpy-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:eae430ecf5794cb7ae7fa3808740b015aa80747e5266153128ef055975a72b99"}, + {file = "numpy-1.26.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:166b36197e9debc4e384e9c652ba60c0bacc216d0fc89e78f973a9760b503388"}, + {file = "numpy-1.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f042f66d0b4ae6d48e70e28d487376204d3cbf43b84c03bac57e28dac6151581"}, + {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5e18e5b14a7560d8acf1c596688f4dfd19b4f2945b245a71e5af4ddb7422feb"}, + {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6bad22a791226d0a5c7c27a80a20e11cfe09ad5ef9084d4d3fc4a299cca505"}, + {file = "numpy-1.26.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4acc65dd65da28060e206c8f27a573455ed724e6179941edb19f97e58161bb69"}, + {file = "numpy-1.26.0-cp312-cp312-win32.whl", hash = "sha256:bb0d9a1aaf5f1cb7967320e80690a1d7ff69f1d47ebc5a9bea013e3a21faec95"}, + {file = "numpy-1.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:ee84ca3c58fe48b8ddafdeb1db87388dce2c3c3f701bf447b05e4cfcc3679112"}, + {file = "numpy-1.26.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a873a8180479bc829313e8d9798d5234dfacfc2e8a7ac188418189bb8eafbd2"}, + {file = "numpy-1.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:914b28d3215e0c721dc75db3ad6d62f51f630cb0c277e6b3bcb39519bed10bd8"}, + {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c78a22e95182fb2e7874712433eaa610478a3caf86f28c621708d35fa4fd6e7f"}, + {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f737708b366c36b76e953c46ba5827d8c27b7a8c9d0f471810728e5a2fe57c"}, + 
{file = "numpy-1.26.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b44e6a09afc12952a7d2a58ca0a2429ee0d49a4f89d83a0a11052da696440e49"}, + {file = "numpy-1.26.0-cp39-cp39-win32.whl", hash = "sha256:5671338034b820c8d58c81ad1dafc0ed5a00771a82fccc71d6438df00302094b"}, + {file = "numpy-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:020cdbee66ed46b671429c7265cf00d8ac91c046901c55684954c3958525dab2"}, + {file = "numpy-1.26.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0792824ce2f7ea0c82ed2e4fecc29bb86bee0567a080dacaf2e0a01fe7654369"}, + {file = "numpy-1.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d484292eaeb3e84a51432a94f53578689ffdea3f90e10c8b203a99be5af57d8"}, + {file = "numpy-1.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:186ba67fad3c60dbe8a3abff3b67a91351100f2661c8e2a80364ae6279720299"}, + {file = "numpy-1.26.0.tar.gz", hash = "sha256:f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf"}, ] [[package]] name = "onnxruntime" -version = "1.15.1" +version = "1.16.0" description = "ONNX Runtime is a runtime accelerator for Machine Learning models" optional = false python-versions = "*" files = [ - {file = "onnxruntime-1.15.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:baad59e6a763237fa39545325d29c16f98b8a45d2dfc524c67631e2e3ba44d16"}, - {file = "onnxruntime-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:568c2db848f619a0a93e843c028e9fb4879929d40b04bd60f9ba6eb8d2e93421"}, - {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69088d7784bb04dedfd9e883e2c96e4adf8ae0451acdd0abb78d68f59ecc6d9d"}, - {file = "onnxruntime-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cef43737b2cd886d5d718d100f56ec78c9c476c5db5f8f946e95024978fe754"}, - {file = "onnxruntime-1.15.1-cp310-cp310-win32.whl", hash = "sha256:79d7e65abb44a47c633ede8e53fe7b9756c272efaf169758c482c983cca98d7e"}, - {file = 
"onnxruntime-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:8bc4c47682933a7a2c79808688aad5f12581305e182be552de50783b5438e6bd"}, - {file = "onnxruntime-1.15.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:652b2cb777f76446e3cc41072dd3d1585a6388aeff92b9de656724bc22e241e4"}, - {file = "onnxruntime-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89b86dbed15740abc385055a29c9673a212600248d702737ce856515bdeddc88"}, - {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5cdd9ee748149a57f4cdfa67187a0d68f75240645a3c688299dcd08742cc98"}, - {file = "onnxruntime-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f748cce6a70ed38c19658615c55f4eedb9192765a4e9c4bd2682adfe980698d"}, - {file = "onnxruntime-1.15.1-cp311-cp311-win32.whl", hash = "sha256:e0312046e814c40066e7823da58075992d51364cbe739eeeb2345ec440c3ac59"}, - {file = "onnxruntime-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:f0980969689cb956c22bd1318b271e1be260060b37f3ddd82c7d63bd7f2d9a79"}, - {file = "onnxruntime-1.15.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:345986cfdbd6f4b20a89b6a6cd9abd3e2ced2926ae0b6e91fefa8149f95c0f09"}, - {file = "onnxruntime-1.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d7b3ad75e040f1e95757f69826a11051737b31584938a26d466a0234c6de98"}, - {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3603d07b829bcc1c14963a76103e257aade8861eb208173b300cc26e118ec2f8"}, - {file = "onnxruntime-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3df0625b9295daf1f7409ea55f72e1eeb38d54f5769add53372e79ddc3cf98d"}, - {file = "onnxruntime-1.15.1-cp38-cp38-win32.whl", hash = "sha256:f68b47fdf1a0406c0292f81ac993e2a2ae3e8b166b436d590eb221f64e8e187a"}, - {file = "onnxruntime-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:52d762d297cc3f731f54fa65a3e329b813164970671547bef6414d0ed52765c9"}, - {file = 
"onnxruntime-1.15.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:99228f9f03dc1fc8af89a28c9f942e8bd3e97e894e263abe1a32e4ddb1f6363b"}, - {file = "onnxruntime-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:45db7f96febb0cf23e3af147f35c4f8de1a37dd252d1cef853c242c2780250cd"}, - {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bafc112a36db25c821b90ab747644041cb4218f6575889775a2c12dd958b8c3"}, - {file = "onnxruntime-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985693d18f2d46aa34fd44d7f65ff620660b2c8fa4b8ec365c2ca353f0fbdb27"}, - {file = "onnxruntime-1.15.1-cp39-cp39-win32.whl", hash = "sha256:708eb31b0c04724bf0f01c1309a9e69bbc09b85beb750e5662c8aed29f1ff9fd"}, - {file = "onnxruntime-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:73d6de4c42dfde1e9dbea04773e6dc23346c8cda9c7e08c6554fafc97ac60138"}, + {file = "onnxruntime-1.16.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:69c86ba3d90c166944c4a3c8a5b2a24a7bc45e68ae5997d83279af21ffd0f5f3"}, + {file = "onnxruntime-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:604a46aa2ad6a51f2fc4df1a984ea571a43aa02424aea93464c32ce02d23b3bb"}, + {file = "onnxruntime-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a40660516b382031279fb690fc3d068ad004173c2bd12bbdc0bd0fe01ef8b7c3"}, + {file = "onnxruntime-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:349fd9c7875c1a76609d45b079484f8059adfb1fb87a30506934fb667ceab249"}, + {file = "onnxruntime-1.16.0-cp310-cp310-win32.whl", hash = "sha256:22c9e2f1a1f15b41b01195cd2520c013c22228efc4795ae4118048ea4118aad2"}, + {file = "onnxruntime-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:b9667a131abfd226a728cc1c1ecf5cc5afa4fff37422f95a84bc22f7c175b57f"}, + {file = "onnxruntime-1.16.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f7b292726a1f3fa4a483d7e902da083a5889a86a860dbc3a6479988cad342578"}, + {file = 
"onnxruntime-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61eaf288a2482c5561f620fb686c80c32709e92724bbb59a5e4a0d349429e205"}, + {file = "onnxruntime-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fe2239d5821d5501eecccfe5c408485591b5d73eb76a61491a8f78179c2e65a"}, + {file = "onnxruntime-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a4924604fcdf1704b7f7e087b4c0b0e181c58367a687da55b1aec2705631943"}, + {file = "onnxruntime-1.16.0-cp311-cp311-win32.whl", hash = "sha256:55d8456f1ab28c32aec9c478b7638ed145102b03bb9b719b79e065ffc5de9c72"}, + {file = "onnxruntime-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:c2a53ffd456187028c841ac7ed0d83b4c2b7e48bd2b1cf2a42d253ecf1e97cb3"}, + {file = "onnxruntime-1.16.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:bf5769aa4095cfe2503307867fa95b5f73732909ee21b67fe24da443af445925"}, + {file = "onnxruntime-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0974deadf11ddab201d915a10517be00fa9d6816def56fa374e4c1a0008985a"}, + {file = "onnxruntime-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99dccf1d2eba5ecd7b6c0e8e80d92d0030291f3506726c156e018a4d7a187c6f"}, + {file = "onnxruntime-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0170ed05d3a8a7c24fe01fc262a6bc603837751f3bb273df7006a2da73f37fff"}, + {file = "onnxruntime-1.16.0-cp38-cp38-win32.whl", hash = "sha256:5ecd38e98ccdcbbaa7e529e96852f4c1c136559802354b76378d9a19532018ee"}, + {file = "onnxruntime-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:1c585c60e9541a9bd4fb319ba9a3ef6122a28dcf4f3dbcdf014df44570cad6f8"}, + {file = "onnxruntime-1.16.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:efe59c1e51ad647fb18860233f5971e309961d09ca10697170ef9b7d9fa728f4"}, + {file = "onnxruntime-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e3c9a9cccab8f6512a0c0207b2816dd8864f2f720f6e9df5cf01e30c4f80194f"}, + {file = 
"onnxruntime-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf16a252308ec6e0737db7028b63fed0ac28fbad134f86216c0dfb051a31f38"}, + {file = "onnxruntime-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f533aa90ee7189e88b6b612d6adae7d290971090598cfd47ce034ab0d106fc9c"}, + {file = "onnxruntime-1.16.0-cp39-cp39-win32.whl", hash = "sha256:306c7f5d8a0c24c65afb34f7deb0bc526defde2249e53538f1dce083945a2d6e"}, + {file = "onnxruntime-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:df8a00a7b057ba497e2822175cc68731d84b89a6d50a3a2a3ec51e98e9c91125"}, ] [package.dependencies] @@ -3650,13 +3855,13 @@ sympy = "*" [[package]] name = "openai" -version = "0.27.8" +version = "0.27.10" description = "Python client library for the OpenAI API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-0.27.8-py3-none-any.whl", hash = "sha256:e0a7c2f7da26bdbe5354b03c6d4b82a2f34bd4458c7a17ae1a7092c3e397e03c"}, - {file = "openai-0.27.8.tar.gz", hash = "sha256:2483095c7db1eee274cebac79e315a986c4e55207bb4fa7b82d185b3a2ed9536"}, + {file = "openai-0.27.10-py3-none-any.whl", hash = "sha256:beabd1757e3286fa166dde3b70ebb5ad8081af046876b47c14c41e203ed22a14"}, + {file = "openai-0.27.10.tar.gz", hash = "sha256:60e09edf7100080283688748c6803b7b3b52d5a55d21890f3815292a0552d83b"}, ] [package.dependencies] @@ -3686,13 +3891,13 @@ et-xmlfile = "*" [[package]] name = "opentelemetry-api" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_api-1.19.0-py3-none-any.whl", hash = "sha256:dcd2a0ad34b691964947e1d50f9e8c415c32827a1d87f0459a72deb9afdf5597"}, - {file = "opentelemetry_api-1.19.0.tar.gz", hash = "sha256:db374fb5bea00f3c7aa290f5d94cea50b659e6ea9343384c5f6c2bb5d5e8db65"}, + {file = "opentelemetry_api-1.20.0-py3-none-any.whl", hash = "sha256:982b76036fec0fdaf490ae3dfd9f28c81442a33414f737abc687a32758cdcba5"}, + {file = 
"opentelemetry_api-1.20.0.tar.gz", hash = "sha256:06abe351db7572f8afdd0fb889ce53f3c992dbf6f6262507b385cc1963e06983"}, ] [package.dependencies] @@ -3701,42 +3906,43 @@ importlib-metadata = ">=6.0,<7.0" [[package]] name = "opentelemetry-exporter-otlp" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Collector Exporters" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_exporter_otlp-1.19.0-py3-none-any.whl", hash = "sha256:5316ffc754d5a4fb4faaa811a837593edd680bed429fd7c10d898e45f4946903"}, - {file = "opentelemetry_exporter_otlp-1.19.0.tar.gz", hash = "sha256:2d4b066180452b8e74a0a598d98901769148a0fe17900c70aca37d3c7c4e8aaa"}, + {file = "opentelemetry_exporter_otlp-1.20.0-py3-none-any.whl", hash = "sha256:3b4d47726da83fef84467bdf96da4f8f3d1a61b35db3c16354c391ce8e9decf6"}, + {file = "opentelemetry_exporter_otlp-1.20.0.tar.gz", hash = "sha256:f8cb69f80c333166e5cfaa030f9e28f7faaf343aff24caaa2cb4202ea4849b6b"}, ] [package.dependencies] -opentelemetry-exporter-otlp-proto-grpc = "1.19.0" -opentelemetry-exporter-otlp-proto-http = "1.19.0" +opentelemetry-exporter-otlp-proto-grpc = "1.20.0" +opentelemetry-exporter-otlp-proto-http = "1.20.0" [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Protobuf encoding" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.19.0-py3-none-any.whl", hash = "sha256:792d5496ecfebaf4f56b2c434c5e2823f88ffaeb5dcd272ea423b249fd52bded"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.19.0.tar.gz", hash = "sha256:c13d02a31dec161f8910d96db6b58309af17d92b827c64284bf85eec3f2d7297"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.20.0-py3-none-any.whl", hash = "sha256:dd63209b40702636ab6ae76a06b401b646ad7b008a906ecb41222d4af24fbdef"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.20.0.tar.gz", hash = 
"sha256:df60c681bd61812e50b3a39a7a1afeeb6d4066117583249fcc262269374e7a49"}, ] [package.dependencies] -opentelemetry-proto = "1.19.0" +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +opentelemetry-proto = "1.20.0" [[package]] name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.19.0-py3-none-any.whl", hash = "sha256:ae2a6484d12ba4d0f1096c4565193c1c27a951a9d2b10a9a84da3e1f866e84d6"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.19.0.tar.gz", hash = "sha256:e69261b4da8cbaa42d9b5f1cff4fcebbf8a3c02f85d69a8aea698312084f4180"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.20.0-py3-none-any.whl", hash = "sha256:7c3f066065891b56348ba2c7f9df6ec635a712841cae0a36f2f6a81642ae7dec"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.20.0.tar.gz", hash = "sha256:6c06d43c3771bda1795226e327722b4b980fa1ca1ec9e985f2ef3e29795bdd52"}, ] [package.dependencies] @@ -3745,22 +3951,22 @@ deprecated = ">=1.2.6" googleapis-common-protos = ">=1.52,<2.0" grpcio = ">=1.0.0,<2.0.0" opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.19.0" -opentelemetry-proto = "1.19.0" -opentelemetry-sdk = ">=1.19.0,<1.20.0" +opentelemetry-exporter-otlp-proto-common = "1.20.0" +opentelemetry-proto = "1.20.0" +opentelemetry-sdk = ">=1.20.0,<1.21.0" [package.extras] test = ["pytest-grpc"] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_exporter_otlp_proto_http-1.19.0-py3-none-any.whl", hash = "sha256:4e050ca57819519a3cb8a6b17feac0d3b4b115796674b6a26295036ae941e11a"}, - {file = "opentelemetry_exporter_otlp_proto_http-1.19.0.tar.gz", hash = 
"sha256:7c8d2c1268cef4d9c1b13a1399f510e3c3dbb88c52f54597d487a128a23b681e"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.20.0-py3-none-any.whl", hash = "sha256:03f6e768ad25f1c3a9586e8c695db4a4adf978f8546a1285fa962e16bfbb0bd6"}, + {file = "opentelemetry_exporter_otlp_proto_http-1.20.0.tar.gz", hash = "sha256:500f42821420fdf0759193d6438edc0f4e984a83e14c08a23023c06a188861b4"}, ] [package.dependencies] @@ -3768,9 +3974,9 @@ backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} deprecated = ">=1.2.6" googleapis-common-protos = ">=1.52,<2.0" opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.19.0" -opentelemetry-proto = "1.19.0" -opentelemetry-sdk = ">=1.19.0,<1.20.0" +opentelemetry-exporter-otlp-proto-common = "1.20.0" +opentelemetry-proto = "1.20.0" +opentelemetry-sdk = ">=1.20.0,<1.21.0" requests = ">=2.7,<3.0" [package.extras] @@ -3794,13 +4000,13 @@ prometheus-client = ">=0.5.0,<1.0.0" [[package]] name = "opentelemetry-instrumentation" -version = "0.40b0" +version = "0.41b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_instrumentation-0.40b0-py3-none-any.whl", hash = "sha256:789d3726e698aa9526dd247b461b9172f99a4345571546c4aecf40279679fc8e"}, - {file = "opentelemetry_instrumentation-0.40b0.tar.gz", hash = "sha256:08bebe6a752514ed61e901e9fee5ccf06ae7533074442e707d75bb65f3e0aa17"}, + {file = "opentelemetry_instrumentation-0.41b0-py3-none-any.whl", hash = "sha256:0ef9e5705ceca0205992a4a845ae4251ce6ec15a1206ca07c2b00afb0c5bd386"}, + {file = "opentelemetry_instrumentation-0.41b0.tar.gz", hash = "sha256:214382ba10dfd29d4e24898a4c7ef18b7368178a6277a1aec95cdb75cabf4612"}, ] [package.dependencies] @@ -3810,20 +4016,20 @@ wrapt = ">=1.0.0,<2.0.0" [[package]] name = "opentelemetry-instrumentation-aiohttp-client" -version = "0.40b0" +version = "0.41b0" description = "OpenTelemetry aiohttp client 
instrumentation" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_instrumentation_aiohttp_client-0.40b0-py3-none-any.whl", hash = "sha256:195c7089636c7639463fbc588b79bc0cf41dd2971f7da1e99a9d4433249d058d"}, - {file = "opentelemetry_instrumentation_aiohttp_client-0.40b0.tar.gz", hash = "sha256:4de0a72b922d391183d00e2a1cc32e52b88dd2a6c77311b166e566b125b19943"}, + {file = "opentelemetry_instrumentation_aiohttp_client-0.41b0-py3-none-any.whl", hash = "sha256:a1d0d18dee5e57cf9187d1a561f9d4ce56d16433231208405458358ff6399a6f"}, + {file = "opentelemetry_instrumentation_aiohttp_client-0.41b0.tar.gz", hash = "sha256:56fd35e90c2534b2647e7cdd85f34383eddaa300ee51e989c3763dcdb205ca91"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.40b0" -opentelemetry-semantic-conventions = "0.40b0" -opentelemetry-util-http = "0.40b0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" wrapt = ">=1.0.0,<2.0.0" [package.extras] @@ -3832,79 +4038,79 @@ test = ["http-server-mock", "opentelemetry-instrumentation-aiohttp-client[instru [[package]] name = "opentelemetry-instrumentation-asgi" -version = "0.40b0" +version = "0.41b0" description = "ASGI instrumentation for OpenTelemetry" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_instrumentation_asgi-0.40b0-py3-none-any.whl", hash = "sha256:3fc6940bacaccf2d96c24bd937a46dade28b930df8ec4e1231f612f2a66e4496"}, - {file = "opentelemetry_instrumentation_asgi-0.40b0.tar.gz", hash = "sha256:98678ef9e3856746dd52b11b8c6ce258acc70ecb910c9053ad7aaabab8fa71f2"}, + {file = "opentelemetry_instrumentation_asgi-0.41b0-py3-none-any.whl", hash = "sha256:46084195fb9c50507abbe1dd490ae4c31c8658c5790f1ddf7af95c417dbe6422"}, + {file = "opentelemetry_instrumentation_asgi-0.41b0.tar.gz", hash = "sha256:921244138b37a9a25edf2153f1c248f16f98610ee8d840b25fd7bf6b165e4d72"}, ] [package.dependencies] 
asgiref = ">=3.0,<4.0" opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.40b0" -opentelemetry-semantic-conventions = "0.40b0" -opentelemetry-util-http = "0.40b0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" [package.extras] instruments = ["asgiref (>=3.0,<4.0)"] -test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.40b0)"] +test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.41b0)"] [[package]] name = "opentelemetry-instrumentation-fastapi" -version = "0.40b0" +version = "0.41b0" description = "OpenTelemetry FastAPI Instrumentation" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_instrumentation_fastapi-0.40b0-py3-none-any.whl", hash = "sha256:2d71b41b460ebadc347a87ef6c9595864965745a7752b1e08d064eea327e350e"}, - {file = "opentelemetry_instrumentation_fastapi-0.40b0.tar.gz", hash = "sha256:d97276a4bda9155de74b0761cdf52831d22fb93894b644ebba026501aa6f7a94"}, + {file = "opentelemetry_instrumentation_fastapi-0.41b0-py3-none-any.whl", hash = "sha256:5990368e99ecc989df0a248a0b9b8e85d8b3eb7c1dbf5131c36982ba7f4a43b7"}, + {file = "opentelemetry_instrumentation_fastapi-0.41b0.tar.gz", hash = "sha256:eb4ceefe8b944fc9ea5e61fa558b99afd1285431b563f3f0104ac177cde4dfe5"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.40b0" -opentelemetry-instrumentation-asgi = "0.40b0" -opentelemetry-semantic-conventions = "0.40b0" -opentelemetry-util-http = "0.40b0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-instrumentation-asgi = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" [package.extras] instruments = ["fastapi (>=0.58,<1.0)"] -test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.40b0)", "requests (>=2.23,<3.0)"] +test = 
["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.41b0)", "requests (>=2.23,<3.0)"] [[package]] name = "opentelemetry-instrumentation-grpc" -version = "0.40b0" +version = "0.41b0" description = "OpenTelemetry gRPC instrumentation" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_instrumentation_grpc-0.40b0-py3-none-any.whl", hash = "sha256:36904f1c25a29465efcb1cc3f90b5871b462f547cd6ab244c5ea6185872eb507"}, - {file = "opentelemetry_instrumentation_grpc-0.40b0.tar.gz", hash = "sha256:c8ac3a1db5553f17aa0d071ac0a3f520da0ef1e9c6f1d1625b211fa58b084a7f"}, + {file = "opentelemetry_instrumentation_grpc-0.41b0-py3-none-any.whl", hash = "sha256:da08f5a545655079bf3333ede05491915e11f04b21e1a511c54f001e7fb9cb22"}, + {file = "opentelemetry_instrumentation_grpc-0.41b0.tar.gz", hash = "sha256:0a0df7bb974eab9a0b5750410114067602129f3b641b5d73d3f0fb3526be7847"}, ] [package.dependencies] opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.40b0" +opentelemetry-instrumentation = "0.41b0" opentelemetry-sdk = ">=1.12,<2.0" -opentelemetry-semantic-conventions = "0.40b0" +opentelemetry-semantic-conventions = "0.41b0" wrapt = ">=1.0.0,<2.0.0" [package.extras] instruments = ["grpcio (>=1.27,<2.0)"] -test = ["opentelemetry-instrumentation-grpc[instruments]", "opentelemetry-sdk (>=1.12,<2.0)", "opentelemetry-test-utils (==0.40b0)", "protobuf (>=3.13,<4.0)"] +test = ["opentelemetry-instrumentation-grpc[instruments]", "opentelemetry-sdk (>=1.12,<2.0)", "opentelemetry-test-utils (==0.41b0)", "protobuf (>=3.13,<4.0)"] [[package]] name = "opentelemetry-proto" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Python Proto" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_proto-1.19.0-py3-none-any.whl", hash = "sha256:d06391cb5fa0fab111b42d3adf5f22683748745bd8f59ed4acfd93cfd252b960"}, - {file = "opentelemetry_proto-1.19.0.tar.gz", hash = 
"sha256:be53205622d85ecd37ebbf764aed907d87620a45eae638860cb5a778bf900c04"}, + {file = "opentelemetry_proto-1.20.0-py3-none-any.whl", hash = "sha256:512c3d2c6864fb7547a69577c3907348e6c985b7a204533563cb4c4c5046203b"}, + {file = "opentelemetry_proto-1.20.0.tar.gz", hash = "sha256:cf01f49b3072ee57468bccb1a4f93bdb55411f4512d0ac3f97c5c04c0040b5a2"}, ] [package.dependencies] @@ -3912,40 +4118,40 @@ protobuf = ">=3.19,<5.0" [[package]] name = "opentelemetry-sdk" -version = "1.19.0" +version = "1.20.0" description = "OpenTelemetry Python SDK" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_sdk-1.19.0-py3-none-any.whl", hash = "sha256:bb67ad676b1bc671766a40d7fc9d9563854c186fa11f0dc8fa2284e004bd4263"}, - {file = "opentelemetry_sdk-1.19.0.tar.gz", hash = "sha256:765928956262c7a7766eaba27127b543fb40ef710499cad075f261f52163a87f"}, + {file = "opentelemetry_sdk-1.20.0-py3-none-any.whl", hash = "sha256:f2230c276ff4c63ea09b3cb2e2ac6b1265f90af64e8d16bbf275c81a9ce8e804"}, + {file = "opentelemetry_sdk-1.20.0.tar.gz", hash = "sha256:702e432a457fa717fd2ddfd30640180e69938f85bb7fec3e479f85f61c1843f8"}, ] [package.dependencies] -opentelemetry-api = "1.19.0" -opentelemetry-semantic-conventions = "0.40b0" +opentelemetry-api = "1.20.0" +opentelemetry-semantic-conventions = "0.41b0" typing-extensions = ">=3.7.4" [[package]] name = "opentelemetry-semantic-conventions" -version = "0.40b0" +version = "0.41b0" description = "OpenTelemetry Semantic Conventions" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_semantic_conventions-0.40b0-py3-none-any.whl", hash = "sha256:7ebbaf86755a0948902e68637e3ae516c50222c30455e55af154ad3ffe283839"}, - {file = "opentelemetry_semantic_conventions-0.40b0.tar.gz", hash = "sha256:5a7a491873b15ab7c4907bbfd8737645cc87ca55a0a326c1755d1b928d8a0fae"}, + {file = "opentelemetry_semantic_conventions-0.41b0-py3-none-any.whl", hash = "sha256:45404391ed9e50998183a4925ad1b497c01c143f06500c3b9c3d0013492bb0f2"}, + {file = 
"opentelemetry_semantic_conventions-0.41b0.tar.gz", hash = "sha256:0ce5b040b8a3fc816ea5879a743b3d6fe5db61f6485e4def94c6ee4d402e1eb7"}, ] [[package]] name = "opentelemetry-util-http" -version = "0.40b0" +version = "0.41b0" description = "Web util for OpenTelemetry" optional = false python-versions = ">=3.7" files = [ - {file = "opentelemetry_util_http-0.40b0-py3-none-any.whl", hash = "sha256:7e071110b724a24d70de7441aeb4541bf8495ba5f5c33927e6b806d597379d0f"}, - {file = "opentelemetry_util_http-0.40b0.tar.gz", hash = "sha256:47d93efa1bb6c71954a5c6ae29a9546efae77f6875dc6c807a76898e0d478b80"}, + {file = "opentelemetry_util_http-0.41b0-py3-none-any.whl", hash = "sha256:6a167fd1e0e8b0f629530d971165b5d82ed0be2154b7f29498499c3a517edee5"}, + {file = "opentelemetry_util_http-0.41b0.tar.gz", hash = "sha256:16d5bd04a380dc1079e766562d1e1626cbb47720f197f67010c45f090fffdfb3"}, ] [[package]] @@ -4159,40 +4365,6 @@ files = [ {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] -[[package]] -name = "pdf2image" -version = "1.16.3" -description = "A wrapper around the pdftoppm and pdftocairo command line tools to convert PDF to a PIL Image list." 
-optional = false -python-versions = "*" -files = [ - {file = "pdf2image-1.16.3-py3-none-any.whl", hash = "sha256:b6154164af3677211c22cbb38b2bd778b43aca02758e962fe1e231f6d3b0e380"}, - {file = "pdf2image-1.16.3.tar.gz", hash = "sha256:74208810c2cef4d9e347769b8e62a52303982ddb4f2dfd744c7ab4b940ae287e"}, -] - -[package.dependencies] -pillow = "*" - -[[package]] -name = "pdfminer-six" -version = "20221105" -description = "PDF parser and analyzer" -optional = false -python-versions = ">=3.6" -files = [ - {file = "pdfminer.six-20221105-py3-none-any.whl", hash = "sha256:1eaddd712d5b2732f8ac8486824533514f8ba12a0787b3d5fe1e686cd826532d"}, - {file = "pdfminer.six-20221105.tar.gz", hash = "sha256:8448ab7b939d18b64820478ecac5394f482d7a79f5f7eaa7703c6c959c175e1d"}, -] - -[package.dependencies] -charset-normalizer = ">=2.0.0" -cryptography = ">=36.0.0" - -[package.extras] -dev = ["black", "mypy (==0.931)", "nox", "pytest"] -docs = ["sphinx", "sphinx-argparse"] -image = ["Pillow"] - [[package]] name = "pexpect" version = "4.8.0" @@ -4220,7 +4392,7 @@ files = [ [[package]] name = "pillow" -version = "10.0.0" +version = "10.0.1" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" @@ -4287,13 +4459,13 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa [[package]] name = "pinecone-client" -version = "2.2.2" +version = "2.2.4" description = "Pinecone client and SDK" optional = false python-versions = ">=3.8" files = [ - {file = "pinecone-client-2.2.2.tar.gz", hash = "sha256:391fe413754efd4e0ef00154b44271d63c4cdd4bedf088d23111a5725d863210"}, - {file = "pinecone_client-2.2.2-py3-none-any.whl", hash = "sha256:21fddb752668efee4d3c6b706346d9580e36a8b06b8d97afd60bd33ef2536e7e"}, + {file = "pinecone-client-2.2.4.tar.gz", hash = "sha256:2c1cc1d6648b2be66e944db2ffa59166a37b9164d1135ad525d9cd8b1e298168"}, + {file = "pinecone_client-2.2.4-py3-none-any.whl", hash = 
"sha256:5bf496c01c2f82f4e5c2dc977cc5062ecd7168b8ed90743b09afcc8c7eb242ec"}, ] [package.dependencies] @@ -4308,7 +4480,7 @@ typing-extensions = ">=3.7.4" urllib3 = ">=1.21.1" [package.extras] -grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv2 (==0.1.0)", "grpcio (>=1.44.0)", "lz4 (>=3.1.3)", "protobuf (>=3.19.5,<3.20.0)"] +grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv2 (==0.1.0)", "grpcio (>=1.44.0)", "lz4 (>=3.1.3)", "protobuf (>=3.20.0,<3.21.0)"] [[package]] name = "platformdirs" @@ -4327,13 +4499,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -4342,13 +4514,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "portalocker" -version = "2.7.0" +version = "2.8.2" description = "Wraps the portalocker recipe for easy usage" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "portalocker-2.7.0-py2.py3-none-any.whl", hash = "sha256:a07c5b4f3985c3cf4798369631fb7011adb498e2a46d8440efc75a8f29a0f983"}, - {file = "portalocker-2.7.0.tar.gz", hash = "sha256:032e81d534a88ec1736d03f780ba073f047a06c478b06e2937486f334e955c51"}, + {file = "portalocker-2.8.2-py3-none-any.whl", hash = 
"sha256:cfb86acc09b9aa7c3b43594e19be1345b9d16af3feb08bf92f23d4dce513a28e"}, + {file = "portalocker-2.8.2.tar.gz", hash = "sha256:2b035aa7828e46c58e9b31390ee1f169b98e1066ab10b9a6a861fe7e25ee4f33"}, ] [package.dependencies] @@ -4357,7 +4529,7 @@ pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} [package.extras] docs = ["sphinx (>=1.7.1)"] redis = ["redis"] -tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] [[package]] name = "postgrest" @@ -4503,13 +4675,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg" -version = "3.1.10" +version = "3.1.11" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg-3.1.10-py3-none-any.whl", hash = "sha256:8bbeddae5075c7890b2fa3e3553440376d3c5e28418335dee3c3656b06fa2b52"}, - {file = "psycopg-3.1.10.tar.gz", hash = "sha256:15b25741494344c24066dc2479b0f383dd1b82fa5e75612fa4fa5bb30726e9b6"}, + {file = "psycopg-3.1.11-py3-none-any.whl", hash = "sha256:7542c45810ea16356e5126c9b4291cbc3802aa326fcbba09ff154fe380de29be"}, + {file = "psycopg-3.1.11.tar.gz", hash = "sha256:cd711edb64b07d7f8a233c365806caf7e55bbe7cbbd8d5c680f672bb5353c8d5"}, ] [package.dependencies] @@ -4517,74 +4689,74 @@ typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.1.10)"] -c = ["psycopg-c (==3.1.10)"] +binary = ["psycopg-binary (==3.1.11)"] +c = ["psycopg-c (==3.1.11)"] dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] 
pool = ["psycopg-pool"] -test = ["anyio (>=3.6.2)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.1.10" +version = "3.1.11" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a529c203f6e0f4c67ba27cf8f9739eb3bc880ad70d6ad6c0e56c2230a66b5a09"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd6e14d1aeb12754a43446c77a5ce819b68875cc25ae6538089ef90d7f6dd6f7"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1583ced5948cf88124212c4503dfe5b01ac3e2dd1a2833c083917f4c4aabe8b4"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2098721c486478987be700723b28ec7a48f134eba339de36af0e745f37dfe461"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e61f7b412fca7b15dd043a0b22fd528d2ed8276e76b3764c3889e29fa65082b"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0f33e33a072e3d5af51ee4d4a439e10dbe623fe87ef295d5d688180d529f13f"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7738c59262d8d19154164d99c881ed58ed377fb6f1d685eb0dc43bbcd8022"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:511d38b1e1961d179d47d5103ba9634ecfc7ead431d19a9337ef82f3a2bca807"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:666e7acf2ffdb5e8a58e8b0c1759facdb9688c7e90ee8ca7aed675803b57404d"}, - {file = 
"psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:57b93c756fee5f7c7bd580c34cd5d244f7d5638f8b2cf25333f97b9b8b2ebfd1"}, - {file = "psycopg_binary-3.1.10-cp310-cp310-win_amd64.whl", hash = "sha256:a1d61b7724c7215a8ea4495a5c6b704656f4b7bb6165f4cb9989b685886ebc48"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36fff836a7823c9d71fa7faa333c74b2b081af216cebdbb0f481dce55ee2d974"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:32caf98cb00881bfcbbbae39a15f2a4e08b79ff983f1c0f13b60a888ef6e8431"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5565a6a86fee8d74f30de89e07f399567cdf59367aeb09624eb690d524339076"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fb0d64520b29bd80a6731476ad8e1c20348dfdee00ab098899d23247b641675"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfc05ed4e74fa8615d7cc2bd57f00f97662f4e865a731dbd43da9a527e289c8c"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b59c8cff887757ddf438ff9489d79c5e6b717112c96f5c68e16f367ff8724e"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbaf12361136afefc5faab21a174a437e71c803b083f410e5140c7605bc66b"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ff72576061c774bcce5f5440b93e63d4c430032dd056d30f6cb1988e549dd92c"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a4e91e1a8d61c60f592a1dfcebdf55e52a29fe4fdb650c5bd5414c848e77d029"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f7187269d825e84c945be7d93dd5088a4e0b6481a4bdaba3bf7069d4ac13703d"}, - {file = "psycopg_binary-3.1.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:ba7812a593c16d9d661844dc8dd4d81548fd1c2a0ee676f3e3d8638369f4c5e4"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88caa5859740507b3596c6c2e00ceaccee2c6ab5317bc535887801ad3cc7f3e1"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a3a7e99ba10c2e83a48d79431560e0d5ca7865f68f2bac3a462dc2b151e9926"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:848f4f4707dc73f4b4e844c92f3de795b2ddb728f75132602bda5e6ba55084fc"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:415961e839bb49cfd75cd961503fb8846c0768f247db1fa7171c1ac61d38711b"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0471869e658d0c6b8c3ed53153794739c18d7dad2dd5b8e6ff023a364c20f7df"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4290060ee0d856caa979ecf675c0e6959325f508272ccf27f64c3801c7bcbde7"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:abf04bc06c8f6a1ac3dc2106d3b79c8661352e9d8a57ca2934ffa6aae8fe600a"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:51fe70708243b83bf16710d8c11b61bd46562e6a24a6300d5434380b35911059"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b658f7f8b49fb60a1c52e3f6692f690a85bdf1ad30aafe0f3f1fd74f6958cf8"}, - {file = "psycopg_binary-3.1.10-cp37-cp37m-win_amd64.whl", hash = "sha256:ffc8c796194f23b9b07f6d25f927ec4df84a194bbc7a1f9e73316734eef512f9"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74ce92122be34cf0e5f06d79869e1001c8421a68fa7ddf6fe38a717155cf3a64"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:75608a900984061c8898be68fbddc6f3da5eefdffce6e0624f5371645740d172"}, - {file 
= "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6670d160d054466e8fdedfbc749ef8bf7dfdf69296048954d24645dd4d3d3c01"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d32026cfab7ba7ac687a42c33345026a2fb6fc5608a6144077f767af4386be0b"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:908fa388a5b75dfd17a937acb24708bd272e21edefca9a495004c6f70ec2636a"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e46b97073bd4de114f475249d681eaf054e950699c5d7af554d3684db39b82d"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9cf56bb4b115def3a18157f3b3b7d8322ee94a8dea30028db602c8f9ae34ad1e"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3b6c6f90241c4c5a6ca3f0d8827e37ef90fdc4deb9d8cfa5678baa0ea374b391"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:747176a6aeb058079f56c5397bd90339581ab7b3cc0d62e7445654e6a484c7e1"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41a415e78c457b06497fa0084e4ea7245ca1a377b55756dd757034210b64da7e"}, - {file = "psycopg_binary-3.1.10-cp38-cp38-win_amd64.whl", hash = "sha256:a7bbe9017edd898d7b3a8747700ed045dda96a907dff87f45e642e28d8584481"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f062f20256708929a58c41d44f350efced4c00a603323d1413f6dc0b84d95a5"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dea30f2704337ca2d0322fccfe1fa30f61ce9185de3937eb986321063114a51f"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9d88ac72531034ebf7ec09114e732b066a9078f4ce213cf65cc5e42eb538d30"}, - {file = 
"psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2bea0940d69c3e24a72530730952687912893b34c53aa39e79045e7b446174d"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a691dc8e2436d9c1e5cf93902d63e9501688fccc957eb22f952d37886257470"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa92661f99351765673835a4d936d79bd24dfbb358b29b084d83be38229a90e4"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:30eb731ed5525d8df892db6532cc8ffd8a163b73bc355127dee9c49334e16eee"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50bf7a59d3a85a82d466fed341d352b44d09d6adc18656101d163a7cfc6509a0"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f48665947c55f8d6eb3f0be98de80411508e1ec329f354685329b57fced82c7f"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:caa771569da01fc0389ca34920c331a284425a68f92d1ba0a80cc08935f8356e"}, - {file = "psycopg_binary-3.1.10-cp39-cp39-win_amd64.whl", hash = "sha256:b30887e631fd67affaed98f6cd2135b44f2d1a6d9bca353a69c3889c78bd7aa8"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6b643d191b493251d3b2834a2929ab702b9e3539cdf2d4c5a923a6bc9aa4985"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1125455cffb7e39d937aa43c4b738367a65969ac8daf6a688a5a83b7872c2c89"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:adf2a5e113ccbf5ba30133e91814bec2d5a10629bdf365b52f7d727994f51c54"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f966b201d61a9021ce0bd57ac89863e3892654e1aa8d94e9ed6fbd09931f9d04"}, + {file = 
"psycopg_binary-3.1.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:67c9717599fa6ec5852634ca4e64405af4b7cbfa5ce4e96ce6a101f27a75e6f6"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7eb04da13d588c0407488ab3c003b81d8bbd20bdd38630690974b1329f6acb78"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9671379d1e6118924010b20490fa66d0b46a7868b5571ff77ee9f8d1f5f6137d"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:86c460e1d86d70fdc919109beee2e332ed5b708a99a676564657ffd49da1e330"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cb725f3b168d4d714854a62609fd85cebc9be76a94851d8ca62efc604d6ae38c"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d17d5608b08f4e5650f726a875a74c44bc840ac1d58d407f3fa58576b11f8840"}, + {file = "psycopg_binary-3.1.11-cp310-cp310-win_amd64.whl", hash = "sha256:112d59e083bee10f58636f343a5dbafd63eaf549cc00a0eb1d278d47f94244ab"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4ee614cef003e0b79ccc6b21505405aaf6b144596c8e93f08e4e5061b20382f"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9093421fcafc935402d4e1caf1f9b95879f8f8c8e50f18568b233fbe110a0cff"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:403a43f05b6cc93c749f9c60f609b3127d2fed28783df00d93ce1c34879174e4"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:264ce7152713310ffa355d4e746ffcecb5706836b086c30fff16e522b36052b5"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd16d03ef0b1cfff0b9adffa59265f37ecbf060d92466458970d6bffe68f0baa"}, + {file = 
"psycopg_binary-3.1.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d6f60975171ce97ccf63c953c038abd70c2f0a92b5246e231f2e23df4fe95d6"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a01c2de6a798ccbe3df0cdd608611e7988820e40b7b6e166698e2570d422ecd8"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d2bf3718620bd94486d697084d64cabe82766270cc639eee8792af4467c07de"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b731a4510ffc0fd98f852ea54397cb173ff7ec0d78e4f1e56cbb0736c58a2f2"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2dcd661f28d86d46bbafb390a36ebc095fd6aaf6851af865f136149ef0365103"}, + {file = "psycopg_binary-3.1.11-cp311-cp311-win_amd64.whl", hash = "sha256:856fa116847e02c64cb218e566d6dc282f1ae015cf6301ecfd1f184f0ca63aa3"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549b6437c7664a8d6d151927b9b8a47a893f6fb494ca203bfcea9bfe900025bc"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41806470423ad22750871096293a61dde84b9a8bc2afe445b24a43ddaab32523"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5f427c31422452f50bc37807d863d08191356a6569ad0a81b2c46c1a86bea53"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52b2dc4fd159e5efae9b352e193427b8c418c1db4f2a243ab3f91c47c288bad5"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:992a828ef15c14b2d9a620d5308b90ca8c5002db3e67a1707a79579942e7a32e"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba7df55a331f8db15bd38fb73531fafff2dd847956170a0ca56a431fc8cad94a"}, + {file = 
"psycopg_binary-3.1.11-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8d59390ac47390e997d18bb51a578c66d6a40f29fd37aea67873410fd985e9f6"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:7dd68347e7ac28cd7eca5b3e56dfb9f316b5bf26c16c5e40372d9689b2a4e9f9"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b25d734c3944e8e7e37872ce90a8de41fc6e57b547611ff6339a0dd1ad74b33"}, + {file = "psycopg_binary-3.1.11-cp37-cp37m-win_amd64.whl", hash = "sha256:5774ebecff7cf4088c301d77991de9dab52e5eba5ecd4c896afbb4f9f0846c97"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc206326bce9942e62f1fa80cc1a764ba8af0fa7dad9c7441209846e0323d5d1"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:833d088533d1d6ed758d0fd263666f0d7ceb8a4d8dd4ae440ac041fdf018f651"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89bc2fe2514035556a7fcb5afc52136767359094b937951067975879e94989eb"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef0be7c87d9ec0aa29a8bbf0399cf16d0d768331619898b28c6088960215f646"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:593a3081754e57deec56ce7ee8449837a3b937eab2cd6f52aaa59ecbf76005fc"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ddd364ea43f799be615e3729210f19a58e0996dda7b01a53de0e7001f15d082"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f2b2b51d6bee271bb5ebe37beca751107276bb4f6ea32d960b44ef1cef3b571"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f4cd0c5ac86e2e1f32e93de3a124f79a7a5d24364349de7120535116775047aa"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:460aa41b4b42e640bed3127de80324a292f5a91032af7da441b6a820cb6907f9"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81a7efca200b0eba256c4141e255c1961c4ed1b099904cc57fa01f4751b43255"}, + {file = "psycopg_binary-3.1.11-cp38-cp38-win_amd64.whl", hash = "sha256:4fc2a2f89b9f985a01e2a3830d9fa7572bf902e11eb0229f8cf46c52bf8d5520"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:96c9d992d0b1fd014a0f824b49eb02b6c512608e343dbabdee0fe261a2c9496a"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b7da780c259257d8cf78a1b766cfaf493a26a27cdfccd9c8d62289ae54ab663f"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c3bc72e937097b0e7d586cdee0a4d30e19651c406eb5e6d225a4bf4db3d234"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f13f27232397a24432d74a41feb87ea376fb6f8f3d28eb42c526e3403bbdea0"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6ad686b35f9c65c0c917255da72682436caf96c5f6e9fa6a0d26e1990d40ca6"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c481d3371f7f2e0cada201cdd0d30739633729a243ed6aadb54563fd73b654b0"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:12f296e0af084b463a2dd65de6987dc833e12cfab940ef3c451ef05a420322bc"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3a5b25093370280e176b2830012363b48f05fb7773ebaf3b9ab00dbf5027952c"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e9e2849fec6dd461ef0923a6b2bb659d7d7eafb5c9f9302f27a8cab205abe406"}, + {file = "psycopg_binary-3.1.11-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b88e6204e9c8d6fcd41928ac3721930e404ae64dd4718fc0ff5cf4559ff11d88"}, + {file = 
"psycopg_binary-3.1.11-cp39-cp39-win_amd64.whl", hash = "sha256:c95a61b49ca62600eaa54165e5c7630c3bced7957402df83f511acd48ff2abf6"}, ] [[package]] @@ -4669,41 +4841,41 @@ files = [ [[package]] name = "pulsar-client" -version = "3.2.0" +version = "3.3.0" description = "Apache Pulsar Python client library" optional = false python-versions = "*" files = [ - {file = "pulsar_client-3.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:da53bbe1903026ca1253d36a67bde0ae88513497091658aee8c5514c3e567483"}, - {file = "pulsar_client-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec595a71b7a25f1a72a1350efd6680a511b53253c3cac1911ba3d6c4d71fa64c"}, - {file = "pulsar_client-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3557c65463d74ec8d2864752389beb06761ab591dd134a164e0b1303c66719b"}, - {file = "pulsar_client-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d51dc76fec48217489bde95754ad58288c9389361de42f5a27d64e19840d27fb"}, - {file = "pulsar_client-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9ef2baf85311e0fe1b98342fdafbb93a1818a08ef999eaa524234fedf6f3b941"}, - {file = "pulsar_client-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:0928b02beda0c98e77178f4e30e962ddb8ee8c3320e4c7304a78b0796e976523"}, - {file = "pulsar_client-3.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:584f44b03474a69906be711a597a4d516263a55be31e49fc07be503dc8406821"}, - {file = "pulsar_client-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a637b9a3b30860c61e68a7b8ea650e0987d89e82f73b6a3df1ab662a6438fdda"}, - {file = "pulsar_client-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4a187fdc5febcf16f725179dcf2c476f31eeebd8353794d91754a3202dd5072"}, - {file = "pulsar_client-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5ff879f868cf1fd29db99f39fdb22b3ec3e749c648aca28526689756d922d1c5"}, - {file = 
"pulsar_client-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4a5f85d0cc414f739a5b51d843f213b54b2cd768c3a34f7c27cca410712b1f81"}, - {file = "pulsar_client-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:4fe748283848d829a80c0323558faeebea4c240d69fa58314ac90344f6999d17"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-macosx_10_15_universal2.whl", hash = "sha256:06b91c26def86dbbc35be15257999fd8a2afbadf32983916ea3eef44f4d4cab4"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ec897bc8d232e6b118793378fc662a844334b829a28a1b4ad1c5fe8d019135"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa37c96c25c1b5aff3bad0fd0194b385ec190b2c67a2f439ac91577f81ae18d3"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d49cdd4d1b7fc2e80d100acf14e6fd3898f6e099e403fc56ed22a690245b2fec"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0058ca3191fd24528ccf94dba6f12e4093831454a2597166f96900d0717271bf"}, - {file = "pulsar_client-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cb69b0411008e0b56df51de0aab20aa1c1a12aef3019b9ceba89afbae1f07fe2"}, - {file = "pulsar_client-3.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:f7d33e99602352df7a30707eab4e5781654602212fb618928bffb5523f2bcf35"}, - {file = "pulsar_client-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad1ac15a175ca90555c681a4d0134568771c6346b97a172f3ef14006556a50ae"}, - {file = "pulsar_client-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369e08ef1d5cb196dd9271039928800f90b4701a9c9df90bc068b44260d2fb11"}, - {file = "pulsar_client-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a52ba2b6736a2ebeed31b590e75d417dda149e333461655860efa84d898a3eb4"}, - {file = "pulsar_client-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:5c801334b3b569b23976481a2922bcea0c6dd990fc26544658dd9e9c8f78ca36"}, - {file = "pulsar_client-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:cd01fd419280e9013d1655bc53662248be2656b623b1506480e1a985aa7dadd2"}, - {file = "pulsar_client-3.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:0abe54d84db76435a6cd88ce27610352cabc7efae9fa3e7f874e032ec2ca0b3f"}, - {file = "pulsar_client-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9a1b6a806eb4819d8cbab1c4ae44ebf2110a94204a46c365f5757e1455252f2"}, - {file = "pulsar_client-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34ea2a6b75ae0e303d522e5b57c75a4ff03dc18b9bfc14151fb14dfaf5866f17"}, - {file = "pulsar_client-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:be6d3a9b2e1db3b6d1a7db5e13f7b4ed420674cf072cdb520fb004c4cd54c0af"}, - {file = "pulsar_client-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6b733e6239ffb505f7084df0175baf9d0215f14d0a02e9bbd1fdf71a2d6ea17"}, - {file = "pulsar_client-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:edc2135d02b4793efb086edca0ffaa6e8ac9133961c2cdc17ae487e0a53da481"}, + {file = "pulsar_client-3.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:c31afd3e67a044ff93177df89e08febf214cc965e95ede097d9fe8755af00e01"}, + {file = "pulsar_client-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f66982284571674b215324cc26b5c2f7c56c7043113c47a7084cb70d67a8afb"}, + {file = "pulsar_client-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fe50a06f81c48a75a9b95c27a6446260039adca71d9face273740de96b2efca"}, + {file = "pulsar_client-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d4c46a4b96a6e9919cfe220156d69a2ede8053d9ea1add4ada108abcf2ba9775"}, + {file = "pulsar_client-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1e4b5d44b992c9b036286b483f3588c10b89c6047fb59d80c7474445997f4e10"}, + {file = 
"pulsar_client-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:497a59ac6b650835a3b2c502f53477e5c98e5226998ca3f17c0b0a3eb4d67d08"}, + {file = "pulsar_client-3.3.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:386e78ff52058d881780bae1f6e84ac9434ae0b01a8581755ca8cc0dc844a332"}, + {file = "pulsar_client-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e4ecb780df58bcfd3918590bd3ff31ed79bccfbef3a1a60370642eb1e14a9d2"}, + {file = "pulsar_client-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ce1e215c252f22a6f26ca5e9076826041a04d88dc213b92c86b524be2774a64"}, + {file = "pulsar_client-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b0fd5be73a4103986b9dbe3a66468cf8829371e34af87ff8f216e3980f4cbe"}, + {file = "pulsar_client-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33656450536d83eed1563ff09692c2c415fb199d88e9ed97d701ca446a119e1b"}, + {file = "pulsar_client-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:ce33de700b06583df8777e139d68cb4b4b3d0a2eac168d74278d8935f357fb10"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-macosx_10_15_universal2.whl", hash = "sha256:7b5dd25cf778d6c980d36c53081e843ea272afe7af4f0ad6394ae9513f94641b"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c4e6865fda62a2e460f823dce4d49ac2973a4459b8ff99eda5fdd6aaaebf46"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1810ddc623c8de2675d17405ce47057a9a2b92298e708ce4d9564847f5ad904"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8259c3b856eb6deaa1f93dce893ab18d99d36d102da5612c8e97a4fb41b70ab1"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5e7a48b2e505cde758fd51a601b5da0671fa98c9baee38362aaaa3ab2b930c28"}, + {file = "pulsar_client-3.3.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:ede264385d47257b2f2b08ecde9181ec5338bea5639cc543d1856f01736778d2"}, + {file = "pulsar_client-3.3.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:0f64c62746ccd5b65a0c505f5f40b9af1f147eb1fa2d8f9c90cd5c8b92dd8597"}, + {file = "pulsar_client-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b84a20c9012e3c4ef1b7085acd7467197118c090b378dec27d773fb79d91556"}, + {file = "pulsar_client-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4e15fa696e275ccb66d0791fdc19c4dea0420d81349c8055e485b134125e14f"}, + {file = "pulsar_client-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:72cbb1bdcba2dd1265296b5ba65331622ee89c16db75edaad46dd7b90c6dd447"}, + {file = "pulsar_client-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d54dd12955bf587dd46d9184444af5e853d9da2a14bbfb739ed2c7c3b78ce280"}, + {file = "pulsar_client-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:43f98afdf0334b2b957a4d96f97a1fe8a7f7fd1e2631d40c3f00b4162f396485"}, + {file = "pulsar_client-3.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:efe7c1e6a96daccc522c3567b6847ffa54c13e0f510d9a427b4aeff9fbebe54b"}, + {file = "pulsar_client-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f28e94420090fceeb38e23fc744f3edf8710e48314ef5927d2b674a1d1e43ee0"}, + {file = "pulsar_client-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c8f3eaa98e2351805ecb6efb6d5fedf47a314a3ce6af0e05ea1449ea7244ed"}, + {file = "pulsar_client-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5e69750f8ae57e55fddf97b459ce0d8b38b2bb85f464a71e871ee6a86d893be7"}, + {file = "pulsar_client-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7e147e5ba460c1818bc05254279a885b4e552bcafb8961d40e31f98d5ff46628"}, + {file = "pulsar_client-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:694530af1d6c75fb81456fb509778c1868adee31e997ddece6e21678200182ea"}, ] [package.dependencies] @@ 
-4980,85 +5152,92 @@ plugins = ["importlib-metadata"] [[package]] name = "pymongo" -version = "4.4.1" +version = "4.5.0" description = "Python driver for MongoDB " optional = false python-versions = ">=3.7" files = [ - {file = "pymongo-4.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bbdd6c719cc2ea440d7245ba71ecdda507275071753c6ffe9c8232647246f575"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux1_i686.whl", hash = "sha256:a438508dd8007a4a724601c3790db46fe0edc3d7d172acafc5f148ceb4a07815"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:3a350d03959f9d5b7f2ea0621f5bb2eb3927b8fc1c4031d12cfd3949839d4f66"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:e6d5d2c97c35f83dc65ccd5d64c7ed16eba6d9403e3744e847aee648c432f0bb"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:1944b16ffef3573ae064196460de43eb1c865a64fed23551b5eac1951d80acca"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:912b0fdc16500125dc1837be8b13c99d6782d93d6cd099d0e090e2aca0b6d100"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:d1b1c8eb21de4cb5e296614e8b775d5ecf9c56b7d3c6000f4bfdb17f9e244e72"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3b508e0de613b906267f2c484cb5e9afd3a64680e1af23386ca8f99a29c6145"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f41feb8cf429799ac43ed34504839954aa7d907f8bd9ecb52ed5ff0d2ea84245"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1897123c4bede1af0c264a3bc389a2505bae50d85e4f211288d352928c02d017"}, - {file = "pymongo-4.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c4bcd285bf0f5272d50628e4ea3989738e3af1251b2dd7bf50da2d593f3a56"}, - {file = 
"pymongo-4.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:995b868ccc9df8d36cb28142363e3911846fe9f43348d942951f60cdd7f62224"}, - {file = "pymongo-4.4.1-cp310-cp310-win32.whl", hash = "sha256:a5198beca36778f19a98b56f541a0529502046bc867b352dda5b6322e1ddc4fd"}, - {file = "pymongo-4.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:a86d20210c9805a032cda14225087ec483613aff0955327c7871a3c980562c5b"}, - {file = "pymongo-4.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5a2a1da505ea78787b0382c92dc21a45d19918014394b220c4734857e9c73694"}, - {file = "pymongo-4.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35545583396684ea70a0b005034a469bf3f447732396e5b3d50bec94890b8d5c"}, - {file = "pymongo-4.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5248fdf7244a5e976279fe154d116c73f6206e0be71074ea9d9b1e73b5893dd5"}, - {file = "pymongo-4.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:44381b817eeb47a41bbfbd279594a7fb21017e0e3e15550eb0fd3758333097f3"}, - {file = "pymongo-4.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f0bd25de90b804cc95e548f55f430df2b47f242a4d7bbce486db62f3b3c981f"}, - {file = "pymongo-4.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d67f4029c57b36a0278aeae044ce382752c078c7625cef71b5e2cf3e576961f9"}, - {file = "pymongo-4.4.1-cp311-cp311-win32.whl", hash = "sha256:8082eef0d8c711c9c272906fa469965e52b44dbdb8a589b54857b1351dc2e511"}, - {file = "pymongo-4.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:980da627edc1275896d7d4670596433ec66e1f452ec244e07bbb2f91c955b581"}, - {file = "pymongo-4.4.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:6cf08997d3ecf9a1eabe12c35aa82a5c588f53fac054ed46fe5c16a0a20ea43d"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:a6750449759f0a83adc9df3a469483a8c3eef077490b76f30c03dc8f7a4b1d66"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:efa67f46c1678df541e8f41247d22430905f80a3296d9c914aaa793f2c9fa1db"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9a5e16a32fb1000c72a8734ddd8ae291974deb5d38d40d1bdd01dbe4024eeb0"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:36b0b06c6e830d190215fced82872e5fd8239771063afa206f9adc09574018a3"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4ec9c6d4547c93cf39787c249969f7348ef6c4d36439af10d57b5ee65f3dfbf9"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:5368801ca6b66aacc5cc013258f11899cd6a4c3bb28cec435dd67f835905e9d2"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:91848d555155ad4594de5e575b6452adc471bc7bc4b4d2b1f4f15a78a8af7843"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0f08a2dba1469252462c414b66cb416c7f7295f2c85e50f735122a251fcb131"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2fe4bbf2b2c91e4690b5658b0fbb98ca6e0a8fba9ececd65b4e7d2d1df3e9b01"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e307d67641d0e2f7e7d6ee3dad880d090dace96cc1d95c99d15bd9f545a1168"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d43634594f2486cc9bb604a1dc0914234878c4faf6604574a25260cb2faaa06"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef0e3279e72cccc3dc7be75b12b1e54cc938d7ce13f5f22bea844b9d9d5fecd4"}, - {file = "pymongo-4.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05935f5a4bbae0a99482147588351b7b17999f4a4e6e55abfb74367ac58c0634"}, - {file = 
"pymongo-4.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:854d92d2437e3496742e17342496e1f3d9efb22455501fd6010aa3658138e457"}, - {file = "pymongo-4.4.1-cp37-cp37m-win32.whl", hash = "sha256:ddffc0c6d0e92cf43dc6c47639d1ef9ab3c280db2998a33dbb9953bd864841e1"}, - {file = "pymongo-4.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2259302d8ab51cd56c3d9d5cca325977e35a0bb3a15a297ec124d2da56c214f7"}, - {file = "pymongo-4.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:262a4073d2ee0654f0314ef4d9aab1d8c13dc8dae5c102312e152c02bfa7bdb7"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:022c91e2a41eefbcddc844c534520a13c6f613666c37b9fb9ed039eff47bd2e4"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a0d326c3ba989091026fbc4827638dc169abdbb0c0bbe593716921543f530af6"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5a1e5b931bf729b2eacd720a0e40201c2d5ed0e2bada60863f19b069bb5016c4"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:54d0b8b6f2548e15b09232827d9ba8e03a599c9a30534f7f2c7bae79df2d1f91"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:e426e213ab07a73f8759ab8d69e87d05d7a60b3ecbf7673965948dcf8ebc1c9f"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:53831effe4dc0243231a944dfbd87896e42b1cf081776930de5cc74371405e3b"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:977c34b5b0b50bd169fbca1a4dd06fbfdfd8ac47734fdc3473532c10098e16ce"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab52db4d3aa3b73bcf920fb375dbea63bf0df0cb4bdb38c5a0a69e16568cc21"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bb935789276422d8875f051837356edfccdb886e673444d91e4941a8142bd48"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9d45243ff4800320c842c45e01c91037e281840e8c6ed2949ed82a70f55c0e6a"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d6d2b7e14bb6bc052f6cba0c1cf4d47a2b49c56ea1ed0f960a02bc9afaefb2"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85b92b3828b2c923ed448f820c147ee51fa4566e35c9bf88415586eb0192ced2"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f345380f6d6d6d1dc6db9fa5c8480c439ea79553b71a2cbe3030a1f20676595"}, - {file = "pymongo-4.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0dcc64747b628a96bcfc6405c42acae3762c85d8ae8c1ce18834b8151cad7486"}, - {file = "pymongo-4.4.1-cp38-cp38-win32.whl", hash = "sha256:ebe1683ec85d8bca389183d01ecf4640c797d6f22e6dac3453a6c492920d5ec3"}, - {file = "pymongo-4.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:58c492e28057838792bed67875f982ffbd3c9ceb67341cc03811859fddb8efbf"}, - {file = "pymongo-4.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aed21b3142311ad139629c4e101b54f25447ec40d6f42c72ad5c1a6f4f851f3a"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:98764ae13de0ab80ba824ca0b84177006dec51f48dfb7c944d8fa78ab645c67f"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7b7127bb35f10d974ec1bd5573389e99054c558b821c9f23bb8ff94e7ae6e612"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:48409bac0f6a62825c306c9a124698df920afdc396132908a8e88b466925a248"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:55b6ebeeabe32a9d2e38eeb90f07c020cb91098b34b5fca42ff3991cb6e6e621"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:4e6a70c9d437b043fb07eef1796060f476359e5b7d8e23baa49f1a70379d6543"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = 
"sha256:0bdbbcc1ef3a56347630c57eda5cd9536bdbdb82754b3108c66cbc51b5233dfb"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:04ec1c5451ad358fdbff28ddc6e8a3d1b5f62178d38cd08007a251bc3f59445a"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a7739bcebdbeb5648edb15af00fd38f2ab5de20851a1341d229494a638284cc"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02dba4ea2a6f22de4b50864d3957a0110b75d3eeb40aeab0b0ff64bcb5a063e6"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:884a35c0740744a48f67210692841581ab83a4608d3a031e7125022989ef65f8"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2aab6d1cff00d68212eca75d2260980202b14038d9298fed7d5c455fe3285c7c"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae1f85223193f249320f695eec4242cdcc311357f5f5064c2e72cfd18017e8ee"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b25d2ccdb2901655cc56c0fc978c5ddb35029c46bfd30d182d0e23fffd55b14b"}, - {file = "pymongo-4.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:334d41649f157c56a47fb289bae3b647a867c1a74f5f3a8a371fb361580bd9d3"}, - {file = "pymongo-4.4.1-cp39-cp39-win32.whl", hash = "sha256:c409e5888a94a3ff99783fffd9477128ffab8416e3f8b2c633993eecdcd5c267"}, - {file = "pymongo-4.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3681caf37edbe05f72f0d351e4a6cb5874ec7ab5eeb99df3a277dbf110093739"}, - {file = "pymongo-4.4.1.tar.gz", hash = "sha256:a4df87dbbd03ac6372d24f2a8054b4dc33de497d5227b50ec649f436ad574284"}, + {file = "pymongo-4.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d4fa1b01fa7e5b7bb8d312e3542e211b320eb7a4e3d8dc884327039d93cb9e0"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux1_i686.whl", 
hash = "sha256:dfcd2b9f510411de615ccedd47462dae80e82fdc09fe9ab0f0f32f11cf57eeb5"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:3e33064f1984db412b34d51496f4ea785a9cff621c67de58e09fb28da6468a52"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_i686.whl", hash = "sha256:33faa786cc907de63f745f587e9879429b46033d7d97a7b84b37f4f8f47b9b32"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_ppc64le.whl", hash = "sha256:76a262c41c1a7cbb84a3b11976578a7eb8e788c4b7bfbd15c005fb6ca88e6e50"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_s390x.whl", hash = "sha256:0f4b125b46fe377984fbaecf2af40ed48b05a4b7676a2ff98999f2016d66b3ec"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:40d5f6e853ece9bfc01e9129b228df446f49316a4252bb1fbfae5c3c9dedebad"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:152259f0f1a60f560323aacf463a3642a65a25557683f49cfa08c8f1ecb2395a"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d64878d1659d2a5bdfd0f0a4d79bafe68653c573681495e424ab40d7b6d6d41"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1bb3a62395ffe835dbef3a1cbff48fbcce709c78bd1f52e896aee990928432b"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe48f50fb6348511a3268a893bfd4ab5f263f5ac220782449d03cd05964d1ae7"}, + {file = "pymongo-4.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7591a3beea6a9a4fa3080d27d193b41f631130e3ffa76b88c9ccea123f26dc59"}, + {file = "pymongo-4.5.0-cp310-cp310-win32.whl", hash = "sha256:3a7166d57dc74d679caa7743b8ecf7dc3a1235a9fd178654dddb2b2a627ae229"}, + {file = "pymongo-4.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:21b953da14549ff62ea4ae20889c71564328958cbdf880c64a92a48dda4c9c53"}, + {file = 
"pymongo-4.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ead4f19d0257a756b21ac2e0e85a37a7245ddec36d3b6008d5bfe416525967dc"}, + {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aff6279e405dc953eeb540ab061e72c03cf38119613fce183a8e94f31be608f"}, + {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4c8d6aa91d3e35016847cbe8d73106e3d1c9a4e6578d38e2c346bfe8edb3ca"}, + {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08819da7864f9b8d4a95729b2bea5fffed08b63d3b9c15b4fea47de655766cf5"}, + {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a253b765b7cbc4209f1d8ee16c7287c4268d3243070bf72d7eec5aa9dfe2a2c2"}, + {file = "pymongo-4.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8027c9063579083746147cf401a7072a9fb6829678076cd3deff28bb0e0f50c8"}, + {file = "pymongo-4.5.0-cp311-cp311-win32.whl", hash = "sha256:9d2346b00af524757576cc2406414562cced1d4349c92166a0ee377a2a483a80"}, + {file = "pymongo-4.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:c3c3525ea8658ee1192cdddf5faf99b07ebe1eeaa61bf32821126df6d1b8072b"}, + {file = "pymongo-4.5.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e5a27f348909235a106a3903fc8e70f573d89b41d723a500869c6569a391cff7"}, + {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9a9a39b7cac81dca79fca8c2a6479ef4c7b1aab95fad7544cc0e8fd943595a2"}, + {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:496c9cbcb4951183d4503a9d7d2c1e3694aab1304262f831d5e1917e60386036"}, + {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23cc6d7eb009c688d70da186b8f362d61d5dd1a2c14a45b890bd1e91e9c451f2"}, + {file = 
"pymongo-4.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fff7d17d30b2cd45afd654b3fc117755c5d84506ed25fda386494e4e0a3416e1"}, + {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6422b6763b016f2ef2beedded0e546d6aa6ba87910f9244d86e0ac7690f75c96"}, + {file = "pymongo-4.5.0-cp312-cp312-win32.whl", hash = "sha256:77cfff95c1fafd09e940b3fdcb7b65f11442662fad611d0e69b4dd5d17a81c60"}, + {file = "pymongo-4.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:e57d859b972c75ee44ea2ef4758f12821243e99de814030f69a3decb2aa86807"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2b0176f9233a5927084c79ff80b51bd70bfd57e4f3d564f50f80238e797f0c8a"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89b3f2da57a27913d15d2a07d58482f33d0a5b28abd20b8e643ab4d625e36257"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5caee7bd08c3d36ec54617832b44985bd70c4cbd77c5b313de6f7fce0bb34f93"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:1d40ad09d9f5e719bc6f729cc6b17f31c0b055029719406bd31dde2f72fca7e7"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:076afa0a4a96ca9f77fec0e4a0d241200b3b3a1766f8d7be9a905ecf59a7416b"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:3fa3648e4f1e63ddfe53563ee111079ea3ab35c3b09cd25bc22dadc8269a495f"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:44ee985194c426ddf781fa784f31ffa29cb59657b2dba09250a4245431847d73"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b33c17d9e694b66d7e96977e9e56df19d662031483efe121a24772a44ccbbc7e"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d79ae3bb1ff041c0db56f138c88ce1dfb0209f3546d8d6e7c3f74944ecd2439"}, + {file = 
"pymongo-4.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d67225f05f6ea27c8dc57f3fa6397c96d09c42af69d46629f71e82e66d33fa4f"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41771b22dd2822540f79a877c391283d4e6368125999a5ec8beee1ce566f3f82"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a1f26bc1f5ce774d99725773901820dfdfd24e875028da4a0252a5b48dcab5c"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3236cf89d69679eaeb9119c840f5c7eb388a2110b57af6bb6baf01a1da387c18"}, + {file = "pymongo-4.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e1f61355c821e870fb4c17cdb318669cfbcf245a291ce5053b41140870c3e5cc"}, + {file = "pymongo-4.5.0-cp37-cp37m-win32.whl", hash = "sha256:49dce6957598975d8b8d506329d2a3a6c4aee911fa4bbcf5e52ffc6897122950"}, + {file = "pymongo-4.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2227a08b091bd41df5aadee0a5037673f691e2aa000e1968b1ea2342afc6880"}, + {file = "pymongo-4.5.0-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:435228d3c16a375274ac8ab9c4f9aef40c5e57ddb8296e20ecec9e2461da1017"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8e559116e4128630ad3b7e788e2e5da81cbc2344dee246af44471fa650486a70"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:840eaf30ccac122df260b6005f9dfae4ac287c498ee91e3e90c56781614ca238"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b4fe46b58010115514b842c669a0ed9b6a342017b15905653a5b1724ab80917f"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:a8127437ebc196a6f5e8fddd746bd0903a400dc6b5ae35df672dd1ccc7170a2a"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:2988ef5e6b360b3ff1c6d55c53515499de5f48df31afd9f785d788cdacfbe2d3"}, + {file = 
"pymongo-4.5.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e249190b018d63c901678053b4a43e797ca78b93fb6d17633e3567d4b3ec6107"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:1240edc1a448d4ada4bf1a0e55550b6292420915292408e59159fd8bbdaf8f63"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6d2a56fc2354bb6378f3634402eec788a8f3facf0b3e7d468db5f2b5a78d763"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a0aade2b11dc0c326ccd429ee4134d2d47459ff68d449c6d7e01e74651bd255"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74c0da07c04d0781490b2915e7514b1adb265ef22af039a947988c331ee7455b"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3754acbd7efc7f1b529039fcffc092a15e1cf045e31f22f6c9c5950c613ec4d"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:631492573a1bef2f74f9ac0f9d84e0ce422c251644cd81207530af4aa2ee1980"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e2654d1278384cff75952682d17c718ecc1ad1d6227bb0068fd826ba47d426a5"}, + {file = "pymongo-4.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:168172ef7856e20ec024fe2a746bfa895c88b32720138e6438fd765ebd2b62dd"}, + {file = "pymongo-4.5.0-cp38-cp38-win32.whl", hash = "sha256:b25f7bea162b3dbec6d33c522097ef81df7c19a9300722fa6853f5b495aecb77"}, + {file = "pymongo-4.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:b520aafc6cb148bac09ccf532f52cbd31d83acf4d3e5070d84efe3c019a1adbf"}, + {file = "pymongo-4.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8543253adfaa0b802bfa88386db1009c6ebb7d5684d093ee4edc725007553d21"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:bc5d8c3647b8ae28e4312f1492b8f29deebd31479cd3abaa989090fb1d66db83"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:505f8519c4c782a61d94a17b0da50be639ec462128fbd10ab0a34889218fdee3"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:53f2dda54d76a98b43a410498bd12f6034b2a14b6844ca08513733b2b20b7ad8"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:9c04b9560872fa9a91251030c488e0a73bce9321a70f991f830c72b3f8115d0d"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:58a63a26a1e3dc481dd3a18d6d9f8bd1d576cd1ffe0d479ba7dd38b0aeb20066"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:f076b779aa3dc179aa3ed861be063a313ed4e48ae9f6a8370a9b1295d4502111"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1b1d7d9aabd8629a31d63cd106d56cca0e6420f38e50563278b520f385c0d86e"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37df8f6006286a5896d1cbc3efb8471ced42e3568d38e6cb00857277047b0d63"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56320c401f544d762fc35766936178fbceb1d9261cd7b24fbfbc8fb6f67aa8a5"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbd705d5f3c3d1ff2d169e418bb789ff07ab3c70d567cc6ba6b72b04b9143481"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a167081c75cf66b32f30e2f1eaee9365af935a86dbd76788169911bed9b5d5"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c42748ccc451dfcd9cef6c5447a7ab727351fd9747ad431db5ebb18a9b78a4d"}, + {file = "pymongo-4.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf62da7a4cdec9a4b2981fcbd5e08053edffccf20e845c0b6ec1e77eb7fab61d"}, + {file = 
"pymongo-4.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b5bbb87fa0511bd313d9a2c90294c88db837667c2bda2ea3fa7a35b59fd93b1f"}, + {file = "pymongo-4.5.0-cp39-cp39-win32.whl", hash = "sha256:465fd5b040206f8bce7016b01d7e7f79d2fcd7c2b8e41791be9632a9df1b4999"}, + {file = "pymongo-4.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:63d8019eee119df308a075b8a7bdb06d4720bf791e2b73d5ab0e7473c115d79c"}, + {file = "pymongo-4.5.0.tar.gz", hash = "sha256:681f252e43b3ef054ca9161635f81b730f4d8cadd28b3f2b2004f5a72f853982"}, ] [package.dependencies] @@ -5066,23 +5245,12 @@ dnspython = ">=1.16.0,<3.0.0" [package.extras] aws = ["pymongo-auth-aws (<2.0.0)"] -encryption = ["pymongo-auth-aws (<2.0.0)", "pymongocrypt (>=1.6.0,<2.0.0)"] -gssapi = ["pykerberos"] -ocsp = ["certifi", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] +encryption = ["certifi", "pymongo[aws]", "pymongocrypt (>=1.6.0,<2.0.0)"] +gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] +ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] snappy = ["python-snappy"] zstd = ["zstandard"] -[[package]] -name = "pypandoc" -version = "1.11" -description = "Thin wrapper for pandoc." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "pypandoc-1.11-py3-none-any.whl", hash = "sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007"}, - {file = "pypandoc-1.11.tar.gz", hash = "sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5"}, -] - [[package]] name = "pyparsing" version = "3.1.1" @@ -5099,21 +5267,21 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "3.15.1" +version = "3.16.2" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false python-versions = ">=3.6" files = [ - {file = "pypdf-3.15.1-py3-none-any.whl", hash = "sha256:99b337af7da8046d1e2e94354846e8c56753e1cdc817ac0fbe770c1e2281902b"}, - {file = "pypdf-3.15.1.tar.gz", hash = "sha256:d0dfaf4f10dfb06ac39e1d6a9cbffd63e77621d1e89c0ef08f346fd902df7b4b"}, + {file = "pypdf-3.16.2-py3-none-any.whl", hash = "sha256:d132953be1e2af7b115fbfd445459fdfc601a845ca12379160e1b6afaa1fef2c"}, + {file = "pypdf-3.16.2.tar.gz", hash = "sha256:6e000281fd0f4cd32e6f1e75b05af0b6c0fbbd9777fa9a8e9d86e34cda65419d"}, ] [package.dependencies] -typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing_extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [package.extras] crypto = ["PyCryptodome", "cryptography"] -dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "wheel"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "wheel"] docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] image = ["Pillow (>=8.0.0)"] @@ -5144,13 +5312,13 @@ chardet = "*" [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = 
"pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] @@ -5182,6 +5350,62 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-mock" +version = "3.11.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-sugar" +version = "0.9.7" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-sugar-0.9.7.tar.gz", hash = "sha256:f1e74c1abfa55f7241cf7088032b6e378566f16b938f3f08905e2cf4494edd46"}, + {file = "pytest_sugar-0.9.7-py2.py3-none-any.whl", hash = "sha256:8cb5a4e5f8bbcd834622b0235db9e50432f4cbd71fef55b467fe44e43701e062"}, +] + +[package.dependencies] +packaging = ">=21.3" +pytest = ">=6.2.0" +termcolor = ">=2.1.0" + +[package.extras] +dev = ["black", "flake8", "pre-commit"] + +[[package]] +name = "pytest-xdist" +version = "3.3.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"}, + {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -5196,19 +5420,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "python-docx" -version = "0.8.11" -description = "Create and update Microsoft Word .docx files." 
-optional = false -python-versions = "*" -files = [ - {file = "python-docx-0.8.11.tar.gz", hash = "sha256:1105d233a0956dd8dd1e710d20b159e2d72ac3c301041b95f4d4ceb3e0ebebc4"}, -] - -[package.dependencies] -lxml = ">=2.3.2" - [[package]] name = "python-dotenv" version = "1.0.0" @@ -5286,13 +5497,13 @@ XlsxWriter = ">=0.5.7" [[package]] name = "pytz" -version = "2023.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -5474,13 +5685,13 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qdrant-client" -version = "1.4.0" +version = "1.5.4" description = "Client library for the Qdrant vector search engine" optional = false -python-versions = ">=3.7,<3.12" +python-versions = ">=3.8,<3.12" files = [ - {file = "qdrant_client-1.4.0-py3-none-any.whl", hash = "sha256:2f9e563955b5163da98016f2ed38d9aea5058576c7c5844e9aa205d28155f56d"}, - {file = "qdrant_client-1.4.0.tar.gz", hash = "sha256:2e54f5a80eb1e7e67f4603b76365af4817af15fb3d0c0f44de4fd93afbbe5537"}, + {file = "qdrant_client-1.5.4-py3-none-any.whl", hash = "sha256:b0247886c51d755f70dc1a62545f38ba5c7d72971ab533f1264fb695c21a6d8f"}, + {file = "qdrant_client-1.5.4.tar.gz", hash = "sha256:6ffbca94f7cab23230001710b7dc04684dbc18dadf66982179a37531b4c4b178"}, ] [package.dependencies] @@ -5492,6 +5703,9 @@ portalocker = ">=2.7.0,<3.0.0" pydantic = ">=1.10.8" urllib3 = ">=1.26.14,<2.0.0" 
+[package.extras] +fastembed = ["fastembed (==0.0.4)"] + [[package]] name = "realtime" version = "1.0.0" @@ -5508,6 +5722,24 @@ python-dateutil = ">=2.8.1,<3.0.0" typing-extensions = ">=4.2.0,<5.0.0" websockets = ">=10.3,<11.0" +[[package]] +name = "redis" +version = "4.6.0" +description = "Python client for Redis database and key-value store" +optional = true +python-versions = ">=3.7" +files = [ + {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, + {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + [[package]] name = "regex" version = "2023.8.8" @@ -5628,13 +5860,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.5.2" +version = "13.5.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, - {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, + {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, + {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, ] [package.dependencies] @@ -5644,6 +5876,16 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "roundrobin" +version = "0.0.4" +description = "Collection of roundrobin utilities" +optional = false +python-versions = 
"*" +files = [ + {file = "roundrobin-0.0.4.tar.gz", hash = "sha256:7e9d19a5bd6123d99993fb935fa86d25c88bb2096e493885f61737ed0f5e9abd"}, +] + [[package]] name = "rsa" version = "4.9" @@ -5685,9 +5927,9 @@ files = [ ] [[package]] -name = "safetensors" -version = "0.3.2" -description = "Fast and Safe Tensor serialization" +name = "sacremoses" +version = "0.0.53" +description = "SacreMoses" optional = true python-versions = "*" files = [ @@ -5722,51 +5964,46 @@ files = [ {file = "safetensors-0.3.2.tar.gz", hash = "sha256:2dbd34554ed3b99435a0e84df077108f5334c8336b5ed9cb8b6b98f7b10da2f6"}, ] -[package.extras] -all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] -dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] -jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)"] -numpy = ["numpy (>=1.21.6)"] -paddlepaddle = ["paddlepaddle (>=2.4.1)"] -pinned-tf = ["tensorflow (==2.11.0)"] -quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] -tensorflow = ["tensorflow (>=2.11.0)"] -testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"] -torch = ["torch (>=1.10)"] +[package.dependencies] +click = "*" +joblib = "*" +regex = "*" +six = "*" +tqdm = "*" [[package]] name = "scikit-learn" -version = "1.3.0" +version = "1.3.1" description = "A set of 
python modules for machine learning and data mining" optional = true python-versions = ">=3.8" files = [ - {file = "scikit-learn-1.3.0.tar.gz", hash = "sha256:8be549886f5eda46436b6e555b0e4873b4f10aa21c07df45c4bc1735afbccd7a"}, - {file = "scikit_learn-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:981287869e576d42c682cf7ca96af0c6ac544ed9316328fd0d9292795c742cf5"}, - {file = "scikit_learn-1.3.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:436aaaae2c916ad16631142488e4c82f4296af2404f480e031d866863425d2a2"}, - {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7e28d8fa47a0b30ae1bd7a079519dd852764e31708a7804da6cb6f8b36e3630"}, - {file = "scikit_learn-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae80c08834a473d08a204d966982a62e11c976228d306a2648c575e3ead12111"}, - {file = "scikit_learn-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:552fd1b6ee22900cf1780d7386a554bb96949e9a359999177cf30211e6b20df6"}, - {file = "scikit_learn-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79970a6d759eb00a62266a31e2637d07d2d28446fca8079cf9afa7c07b0427f8"}, - {file = "scikit_learn-1.3.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:850a00b559e636b23901aabbe79b73dc604b4e4248ba9e2d6e72f95063765603"}, - {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee04835fb016e8062ee9fe9074aef9b82e430504e420bff51e3e5fffe72750ca"}, - {file = "scikit_learn-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d953531f5d9f00c90c34fa3b7d7cfb43ecff4c605dac9e4255a20b114a27369"}, - {file = "scikit_learn-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:151ac2bf65ccf363664a689b8beafc9e6aae36263db114b4ca06fbbbf827444a"}, - {file = "scikit_learn-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a885a9edc9c0a341cab27ec4f8a6c58b35f3d449c9d2503a6fd23e06bbd4f6a"}, - {file = 
"scikit_learn-1.3.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:9877af9c6d1b15486e18a94101b742e9d0d2f343d35a634e337411ddb57783f3"}, - {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c470f53cea065ff3d588050955c492793bb50c19a92923490d18fcb637f6383a"}, - {file = "scikit_learn-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd6e2d7389542eae01077a1ee0318c4fec20c66c957f45c7aac0c6eb0fe3c612"}, - {file = "scikit_learn-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:3a11936adbc379a6061ea32fa03338d4ca7248d86dd507c81e13af428a5bc1db"}, - {file = "scikit_learn-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:998d38fcec96584deee1e79cd127469b3ad6fefd1ea6c2dfc54e8db367eb396b"}, - {file = "scikit_learn-1.3.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ded35e810438a527e17623ac6deae3b360134345b7c598175ab7741720d7ffa7"}, - {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e8102d5036e28d08ab47166b48c8d5e5810704daecf3a476a4282d562be9a28"}, - {file = "scikit_learn-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7617164951c422747e7c32be4afa15d75ad8044f42e7d70d3e2e0429a50e6718"}, - {file = "scikit_learn-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:1d54fb9e6038284548072df22fd34777e434153f7ffac72c8596f2d6987110dd"}, + {file = "scikit-learn-1.3.1.tar.gz", hash = "sha256:1a231cced3ee3fa04756b4a7ab532dc9417acd581a330adff5f2c01ac2831fcf"}, + {file = "scikit_learn-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3153612ff8d36fa4e35ef8b897167119213698ea78f3fd130b4068e6f8d2da5a"}, + {file = "scikit_learn-1.3.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:6bb9490fdb8e7e00f1354621689187bef3cab289c9b869688f805bf724434755"}, + {file = "scikit_learn-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7135a03af71138669f19bc96e7d0cc8081aed4b3565cc3b131135d65fc642ba"}, 
+ {file = "scikit_learn-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d8dee8c1f40eeba49a85fe378bdf70a07bb64aba1a08fda1e0f48d27edfc3e6"}, + {file = "scikit_learn-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:4d379f2b34096105a96bd857b88601dffe7389bd55750f6f29aaa37bc6272eb5"}, + {file = "scikit_learn-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14e8775eba072ab10866a7e0596bc9906873e22c4c370a651223372eb62de180"}, + {file = "scikit_learn-1.3.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:58b0c2490eff8355dc26e884487bf8edaccf2ba48d09b194fb2f3a026dd64f9d"}, + {file = "scikit_learn-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f66eddfda9d45dd6cadcd706b65669ce1df84b8549875691b1f403730bdef217"}, + {file = "scikit_learn-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6448c37741145b241eeac617028ba6ec2119e1339b1385c9720dae31367f2be"}, + {file = "scikit_learn-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c413c2c850241998168bbb3bd1bb59ff03b1195a53864f0b80ab092071af6028"}, + {file = "scikit_learn-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:52b77cc08bd555969ec5150788ed50276f5ef83abb72e6f469c5b91a0009bbca"}, + {file = "scikit_learn-1.3.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a683394bc3f80b7c312c27f9b14ebea7766b1f0a34faf1a2e9158d80e860ec26"}, + {file = "scikit_learn-1.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15d964d9eb181c79c190d3dbc2fff7338786bf017e9039571418a1d53dab236"}, + {file = "scikit_learn-1.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ce9233cdf0cdcf0858a5849d306490bf6de71fa7603a3835124e386e62f2311"}, + {file = "scikit_learn-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:1ec668ce003a5b3d12d020d2cde0abd64b262ac5f098b5c84cf9657deb9996a8"}, + {file = "scikit_learn-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:ccbbedae99325628c1d1cbe3916b7ef58a1ce949672d8d39c8b190e10219fd32"}, + {file = "scikit_learn-1.3.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:845f81c7ceb4ea6bac64ab1c9f2ce8bef0a84d0f21f3bece2126adcc213dfecd"}, + {file = "scikit_learn-1.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8454d57a22d856f1fbf3091bd86f9ebd4bff89088819886dc0c72f47a6c30652"}, + {file = "scikit_learn-1.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d993fb70a1d78c9798b8f2f28705bfbfcd546b661f9e2e67aa85f81052b9c53"}, + {file = "scikit_learn-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:66f7bb1fec37d65f4ef85953e1df5d3c98a0f0141d394dcdaead5a6de9170347"}, ] [package.dependencies] joblib = ">=1.1.1" -numpy = ">=1.17.3" +numpy = ">=1.17.3,<2.0" scipy = ">=1.5.0" threadpoolctl = ">=2.0.0" @@ -5908,7 +6145,7 @@ files = [ [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", 
"pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "shapely" @@ -6026,13 +6263,13 @@ files = [ [[package]] name = "soupsieve" -version = "2.4.1" +version = "2.5" description = "A modern CSS selector implementation for Beautiful Soup." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, - {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -6276,21 +6513,18 @@ files = [ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] -name = "textual" -version = "0.33.0" -description = "Modern Text User Interface framework" -optional = true -python-versions = ">=3.7,<4.0" +name = "termcolor" +version = "2.3.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.7" files = [ - {file = "textual-0.33.0-py3-none-any.whl", hash = "sha256:698a093add0fd21c786232bcacde9ff427a9d5dc9ea5deca93437d9453e4ead1"}, - {file = "textual-0.33.0.tar.gz", hash = "sha256:e0a98b1d9c4458c5bb4269c65d0a7f3e926f197411242d2f8faf80183d47a728"}, + {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"}, + {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"}, ] -[package.dependencies] -importlib-metadata = ">=4.11.3" -markdown-it-py = {version = ">=2.1.0", extras = ["linkify", "plugins"]} -rich = ">=13.3.3" -typing-extensions = ">=4.4.0,<5.0.0" +[package.extras] +tests = ["pytest", "pytest-cov"] 
[[package]] name = "threadpoolctl" @@ -6350,69 +6584,119 @@ blobfile = ["blobfile (>=2)"] [[package]] name = "tokenizers" -version = "0.13.3" -description = "Fast and Customizable Tokenizers" +version = "0.14.0" +description = "" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"}, - {file = "tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"}, - {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"}, - {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"}, - {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"}, - {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"}, - {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"}, - {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"}, - {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"}, - {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"}, - {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"}, - {file = "tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"}, - {file = 
"tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"}, - {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"}, - {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"}, - {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"}, - {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"}, - {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"}, - {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"}, - {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"}, - {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"}, - {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"}, + {file = "tokenizers-0.14.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:1a90e1030d9c61de64045206c62721a36f892dcfc5bbbc119dfcd417c1ca60ca"}, + {file = "tokenizers-0.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7cacc5a33767bb2a03b6090eac556c301a1d961ac2949be13977bc3f20cc4e3c"}, + {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81994795e1b4f868a6e73107af8cdf088d31357bae6f7abf26c42874eab16f43"}, + {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ec53f832bfa91abafecbf92b4259b466fb31438ab31e8291ade0fcf07de8fc2"}, + {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:854aa813a55d6031a6399b1bca09e4e7a79a80ec05faeea77fc6809d59deb3d5"}, + {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c34d2f02e25e0fa96e574cadb43a6f14bdefc77f84950991da6e3732489e164"}, + {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f17d5ad725c827d3dc7db2bbe58093a33db2de49bbb639556a6d88d82f0ca19"}, + {file = "tokenizers-0.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:337a7b7d6b32c6f904faee4304987cb018d1488c88b91aa635760999f5631013"}, + {file = 
"tokenizers-0.14.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:98a7ceb767e1079ef2c99f52a4e7b816f2e682b2b6fef02c8eff5000536e54e1"}, + {file = "tokenizers-0.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25ad4a0f883a311a5b021ed979e21559cb4184242c7446cd36e07d046d1ed4be"}, + {file = "tokenizers-0.14.0-cp310-none-win32.whl", hash = "sha256:360706b0c2c6ba10e5e26b7eeb7aef106dbfc0a81ad5ad599a892449b4973b10"}, + {file = "tokenizers-0.14.0-cp310-none-win_amd64.whl", hash = "sha256:1c2ce437982717a5e221efa3c546e636f12f325cc3d9d407c91d2905c56593d0"}, + {file = "tokenizers-0.14.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:612d0ba4f40f4d41163af9613dac59c902d017dc4166ea4537a476af807d41c3"}, + {file = "tokenizers-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3013ad0cff561d9be9ce2cc92b76aa746b4e974f20e5b4158c03860a4c8ffe0f"}, + {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c89a0d6d2ec393a6261df71063b1e22bdd7c6ef3d77b8826541b596132bcf524"}, + {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5514417f37fc2ca8159b27853cd992a9a4982e6c51f04bd3ac3f65f68a8fa781"}, + {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e761fd1af8409c607b11f084dc7cc50f80f08bd426d4f01d1c353b097d2640f"}, + {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c16fbcd5ef10df9e51cc84238cdb05ee37e4228aaff39c01aa12b0a0409e29b8"}, + {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3439d9f858dd9033b69769be5a56eb4fb79fde13fad14fab01edbf2b98033ad9"}, + {file = "tokenizers-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c19f8cdc3e84090464a6e28757f60461388cc8cd41c02c109e180a6b7c571f6"}, + {file = "tokenizers-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:df763ce657a297eb73008d5907243a7558a45ae0930b38ebcb575a24f8296520"}, + {file = "tokenizers-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:095b0b6683a9b76002aa94659f75c09e4359cb291b318d6e77a60965d7a7f138"}, + {file = "tokenizers-0.14.0-cp311-none-win32.whl", hash = "sha256:712ec0e68a399ded8e115e7e25e7017802fa25ee6c36b4eaad88481e50d0c638"}, + {file = "tokenizers-0.14.0-cp311-none-win_amd64.whl", hash = "sha256:917aa6d6615b33d9aa811dcdfb3109e28ff242fbe2cb89ea0b7d3613e444a672"}, + {file = "tokenizers-0.14.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:8464ee7d43ecd9dd1723f51652f49b979052ea3bcd25329e3df44e950c8444d1"}, + {file = "tokenizers-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:84c2b96469b34825557c6fe0bc3154c98d15be58c416a9036ca90afdc9979229"}, + {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:24b3ccec65ee6f876cd67251c1dcfa1c318c9beec5a438b134f7e33b667a8b36"}, + {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde333fc56dd5fbbdf2de3067d6c0c129867d33eac81d0ba9b65752ad6ef4208"}, + {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddcc2f251bd8a2b2f9a7763ad4468a34cfc4ee3b0fba3cfb34d12c964950cac"}, + {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10a34eb1416dcec3c6f9afea459acd18fcc93234687de605a768a987eda589ab"}, + {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:56bc7252530a6a20c6eed19b029914bb9cc781efbe943ca9530856051de99d0f"}, + {file = "tokenizers-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07f5c2324326a00c85111081d5eae4da9d64d56abb5883389b3c98bee0b50a7c"}, + {file = "tokenizers-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5efd92e44e43f36332b5f3653743dca5a0b72cdabb012f20023e220f01f675cb"}, + {file = 
"tokenizers-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9223bcb77a826dbc9fd0efa6bce679a96b1a01005142778bb42ce967581c5951"}, + {file = "tokenizers-0.14.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:e2c1b4707344d3fbfce35d76802c2429ca54e30a5ecb05b3502c1e546039a3bb"}, + {file = "tokenizers-0.14.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:5892ba10fe0a477bde80b9f06bce05cb9d83c15a4676dcae5cbe6510f4524bfc"}, + {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0e1818f33ac901d5d63830cb6a69a707819f4d958ae5ecb955d8a5ad823a2e44"}, + {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d06a6fe406df1e616f9e649522683411c6c345ddaaaad7e50bbb60a2cb27e04d"}, + {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6e2d4bc223dc6a99efbe9266242f1ac03eb0bef0104e6cef9f9512dd5c816b"}, + {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08ea1f612796e438c9a7e2ad86ab3c1c05c8fe0fad32fcab152c69a3a1a90a86"}, + {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ab1a58c05a3bd8ece95eb5d1bc909b3fb11acbd3ff514e3cbd1669e3ed28f5b"}, + {file = "tokenizers-0.14.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:495dc7d3b78815de79dafe7abce048a76154dadb0ffc7f09b7247738557e5cef"}, + {file = "tokenizers-0.14.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aaa0401a245d891b3b2ba9cf027dc65ca07627e11fe3ce597644add7d07064f8"}, + {file = "tokenizers-0.14.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae4fa13a786fd0d6549da241c6a1077f9b6320a7120d922ccc201ad1d4feea8f"}, + {file = "tokenizers-0.14.0-cp37-none-win32.whl", hash = "sha256:ae0d5b5ab6032c24a2e74cc15f65b6510070926671129e922aa3826c834558d7"}, + {file = "tokenizers-0.14.0-cp37-none-win_amd64.whl", hash = 
"sha256:2839369a9eb948905612f5d8e70453267d9c7bf17573e5ab49c2f28368fd635d"}, + {file = "tokenizers-0.14.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:f483af09a07fcb8b8b4cd07ac1be9f58bb739704ef9156e955531299ab17ec75"}, + {file = "tokenizers-0.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9c2ec661d0d63e618cb145ad15ddb6a81e16d9deb7a203f385d78141da028984"}, + {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:97e87eb7cbeff63c3b1aa770fdcf18ea4f1c852bfb75d0c913e71b8924a99d61"}, + {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98c4bd09b47f77f41785488971543de63db82608f0dc0bc6646c876b5ca44d1f"}, + {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0cbeb5406be31f7605d032bb261f2e728da8ac1f4f196c003bc640279ceb0f52"}, + {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe799fa48fd7dd549a68abb7bee32dd3721f50210ad2e3e55058080158c72c25"}, + {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:66daf7c6375a95970e86cb3febc48becfeec4e38b2e0195218d348d3bb86593b"}, + {file = "tokenizers-0.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b177422af79a77c46bb8f56d73827e688fdc092878cff54e24f5c07a908db"}, + {file = "tokenizers-0.14.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a9aef7a5622648b70f979e96cbc2f795eba5b28987dd62f4dbf8f1eac6d64a1a"}, + {file = "tokenizers-0.14.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:397a24feff284d39b40fdd61c1c828bb6648dfe97b6766c84fbaf7256e272d09"}, + {file = "tokenizers-0.14.0-cp38-none-win32.whl", hash = "sha256:93cc2ec19b6ff6149b2e5127ceda3117cc187dd38556a1ed93baba13dffda069"}, + {file = "tokenizers-0.14.0-cp38-none-win_amd64.whl", hash = "sha256:bf7f540ab8a6fc53fb762963edb7539b11f00af8f70b206f0a6d1a25109ad307"}, + {file = 
"tokenizers-0.14.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a58d0b34586f4c5229de5aa124cf76b9455f2e01dc5bd6ed018f6e3bb12572d3"}, + {file = "tokenizers-0.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:90ceca6a06bb4b0048d0a51d0d47ef250d3cb37cc36b6b43334be8c02ac18b0f"}, + {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5f6c9554bda64799b1d65052d834553bff9a6ef4a6c2114668e2ed8f1871a2a3"}, + {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ee14b41024bc05ea172fc2c87f66b60d7c5c636c3a52a09a25ec18e752e6dc7"}, + {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:879201b1c76b24dc70ce02fc42c3eeb7ff20c353ce0ee638be6449f7c80e73ba"}, + {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca79ea6ddde5bb32f7ad1c51de1032829c531e76bbcae58fb3ed105a31faf021"}, + {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd5934048e60aedddf6c5b076d44ccb388702e1650e2eb7b325a1682d883fbf9"}, + {file = "tokenizers-0.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1566cabd4bf8f09d6c1fa7a3380a181801a495e7218289dbbd0929de471711"}, + {file = "tokenizers-0.14.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a8fc72a7adc6fa12db38100c403d659bc01fbf6e57f2cc9219e75c4eb0ea313c"}, + {file = "tokenizers-0.14.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7fd08ed6c14aa285482d9e5f48c04de52bdbcecaca0d30465d7a36bbea6b14df"}, + {file = "tokenizers-0.14.0-cp39-none-win32.whl", hash = "sha256:3279c0c1d5fdea7d3499c582fed392fb0463d1046544ca010f53aeee5d2ce12c"}, + {file = "tokenizers-0.14.0-cp39-none-win_amd64.whl", hash = "sha256:203ca081d25eb6e4bc72ea04d552e457079c5c6a3713715ece246f6ca02ca8d0"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:b45704d5175499387e33a1dd5c8d49ab4d7ef3c36a9ba8a410bb3e68d10f80a0"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6d17d5eb38ccc2f615a7a3692dfa285abe22a1e6d73bbfd753599e34ceee511c"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a7e6e7989ba77a20c33f7a8a45e0f5b3e7530b2deddad2c3b2a58b323156134"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81876cefea043963abf6c92e0cf73ce6ee10bdc43245b6565ce82c0305c2e613"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d8cd05f73d1ce875a23bfdb3a572417c0f46927c6070ca43a7f6f044c3d6605"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:419a38b89be0081d872eac09449c03cd6589c2ee47461184592ee4b1ad93af1d"}, + {file = "tokenizers-0.14.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4caf274a9ba944eb83bc695beef95abe24ce112907fb06217875894d8a4f62b8"}, + {file = "tokenizers-0.14.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:6ecb3a7741d7ebf65db93d246b102efca112860707e07233f1b88703cb01dbc5"}, + {file = "tokenizers-0.14.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cb7fe9a383cb2932848e459d0277a681d58ad31aa6ccda204468a8d130a9105c"}, + {file = "tokenizers-0.14.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4731e0577780d85788ab4f00d54e16e76fe305739396e6fb4c54b89e6fa12de"}, + {file = "tokenizers-0.14.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9900291ccd19417128e328a26672390365dab1d230cd00ee7a5e2a0319e2716"}, + {file = "tokenizers-0.14.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:493e6932fbca6875fd2e51958f1108ce4c5ae41aa6f2b8017c5f07beaff0a1ac"}, + {file = 
"tokenizers-0.14.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1792e6b46b89aba0d501c0497f38c96e5b54735379fd8a07a28f45736ba51bb1"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0af26d37c7080688ef606679f3a3d44b63b881de9fa00cc45adc240ba443fd85"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:99379ec4d7023c07baed85c68983bfad35fd210dfbc256eaafeb842df7f888e3"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:84118aa60dcbb2686730342a0cb37e54e02fde001f936557223d46b6cd8112cd"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d616e1859ffcc8fcda60f556c34338b96fb72ca642f6dafc3b1d2aa1812fb4dd"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7826b79bbbffc2150bf8d621297cc600d8a1ea53992547c4fd39630de10466b4"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eb3931d734f1e66b77c2a8e22ebe0c196f127c7a0f48bf9601720a6f85917926"}, + {file = "tokenizers-0.14.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6a475b5cafc7a740bf33d00334b1f2b434b6124198384d8b511931a891be39ff"}, + {file = "tokenizers-0.14.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3d3c9e286ae00b0308903d2ef7b31efc84358109aa41abaa27bd715401c3fef4"}, + {file = "tokenizers-0.14.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:27244e96810434cf705f317e9b74a1163cd2be20bdbd3ed6b96dae1914a6778c"}, + {file = "tokenizers-0.14.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ca9b0536fd5f03f62427230e85d9d57f9eed644ab74c319ae4877c9144356aed"}, + {file = "tokenizers-0.14.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f64cdff8c0454295b739d77e25cff7264fa9822296395e60cbfecc7f66d88fb"}, + {file = 
"tokenizers-0.14.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00cdfb40544656b7a3b176049d63227d5e53cf2574912514ebb4b9da976aaa1"}, + {file = "tokenizers-0.14.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b611d96b96957cb2f39560c77cc35d2fcb28c13d5b7d741412e0edfdb6f670a8"}, + {file = "tokenizers-0.14.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:27ad1c02fdd74dcf3502fafb87393412e65f698f2e3aba4ad568a1f3b43d5c9f"}, + {file = "tokenizers-0.14.0.tar.gz", hash = "sha256:a06efa1f19dcc0e9bd0f4ffbf963cb0217af92a9694f68fe7eee5e1c6ddc4bde"}, ] +[package.dependencies] +huggingface_hub = ">=0.16.4,<0.17" + [package.extras] -dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +dev = ["tokenizers[testing]"] +docs = ["setuptools_rust", "sphinx", "sphinx_rtd_theme"] testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"] -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = true -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - [[package]] name = "tomli" version = "2.0.1" @@ -6543,87 +6827,80 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.9.0" +version = "5.10.1" description = "Traitlets Python configuration system" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, - {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, + {file = 
"traitlets-5.10.1-py3-none-any.whl", hash = "sha256:07ab9c5bf8a0499fd7b088ba51be899c90ffc936ffc797d7b6907fc516bcd116"}, + {file = "traitlets-5.10.1.tar.gz", hash = "sha256:db9c4aa58139c3ba850101913915c042bdba86f7c8a0dda1c6f7f92c5da8e542"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "transformers" -version = "4.31.0" -description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow" +version = "4.17.0" +description = "State-of-the-art Natural Language Processing for TensorFlow 2.0 and PyTorch" optional = true -python-versions = ">=3.8.0" +python-versions = ">=3.6.0" files = [ - {file = "transformers-4.31.0-py3-none-any.whl", hash = "sha256:8487aab0195ce1c2a5ae189305118b9720daddbc7b688edb09ccd79e3b149f6b"}, - {file = "transformers-4.31.0.tar.gz", hash = "sha256:4302fba920a1c24d3a429a29efff6a63eac03f3f3cf55b55927fc795d01cb273"}, + {file = "transformers-4.17.0-py3-none-any.whl", hash = "sha256:5c7d1955693ebf4a69a0fa700b2ef730232d5d7c1528e15d44c1d473b38f57b8"}, + {file = "transformers-4.17.0.tar.gz", hash = "sha256:986fd59255460555b893a2b1827b9b8dd4e5cd6343e4409d18539208f69fb51b"}, ] [package.dependencies] filelock = "*" -huggingface-hub = ">=0.14.1,<1.0" +huggingface-hub = ">=0.1.0,<1.0" numpy = ">=1.17" packaging = ">=20.0" pyyaml = ">=5.1" regex = "!=2019.12.17" requests = "*" -safetensors = ">=0.3.1" -tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14" +sacremoses = "*" +tokenizers = ">=0.11.1,<0.11.3 || >0.11.3" tqdm = ">=4.27" [package.extras] -accelerate = ["accelerate (>=0.20.3)"] -agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"] -all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av 
(==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +all = ["Pillow", "codecarbon (==1.2.0)", "flax (>=0.3.5)", "jax (>=0.2.8)", "jaxlib (>=0.1.65)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.3.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.3)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3)", "torch (>=1.0)", "torchaudio"] +audio = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)"] codecarbon = ["codecarbon (==1.2.0)"] -deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"] -deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"] -dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", 
"hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"] -dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets 
(!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"] -docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"] -docs-specific = ["hf-doc-builder"] +deepspeed = ["deepspeed (>=0.5.9)"] +dev = ["GitPython (<3.1.19)", "Pillow", "black (>=22.0,<23.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.2)", "datasets", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.3.5)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8)", "jaxlib (>=0.1.65)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.3.0)", "pytest", 
"pytest-timeout", "pytest-xdist", "ray[tune]", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.3)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3)", "torch (>=1.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "black (>=22.0,<23.0)", "cookiecutter (==1.7.2)", "datasets", "faiss-cpu", "flake8 (>=3.8.3)", "isort (>=5.5.4)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.3.0)", "pytest", "pytest-timeout", "pytest-xdist", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.3)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow", "black (>=22.0,<23.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.2)", "datasets", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.3.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3)", "torch (>=1.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +docs = ["Pillow", "codecarbon (==1.2.0)", "flax (>=0.3.5)", "jax (>=0.2.8)", "jaxlib (>=0.1.65)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.3.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.3)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3)", "torch (>=1.0)", "torchaudio"] fairscale = 
["fairscale (>0.3)"] -flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.2.8,!=0.3.2,<=0.4.13)", "jaxlib (>=0.1.65,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"] -flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +flax = ["flax (>=0.3.5)", "jax (>=0.2.8)", "jaxlib (>=0.1.65)", "optax (>=0.0.8)"] +flax-speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)"] ftfy = ["ftfy"] integrations = ["optuna", "ray[tune]", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0,<1.3.1)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] -modelcreation = ["cookiecutter (==1.7.3)"] -natten = ["natten (>=0.14.6)"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +modelcreation = ["cookiecutter (==1.7.2)"] onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)", "urllib3 (<2.0.0)"] +quality = ["GitPython (<3.1.19)", "black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] ray = ["ray[tune]"] -retrieval = ["datasets (!=2.5.0)", "faiss-cpu"] +retrieval = ["datasets", "faiss-cpu"] sagemaker = ["sagemaker (>=2.31.0)"] sentencepiece = ["protobuf", "sentencepiece (>=0.1.91,!=0.1.92)"] -serving = ["fastapi", "pydantic (<2)", "starlette", "uvicorn"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] -speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", 
"protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] -tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"] -tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"] -tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] +speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "black (>=22.0,<23.0)", "cookiecutter (==1.7.2)", "datasets", "faiss-cpu", "nltk", "parameterized", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "timeout-decorator"] +tf = ["onnxconverter-common", "tensorflow (>=2.3)", "tf2onnx"] +tf-cpu = ["onnxconverter-common", "tensorflow-cpu (>=2.3)", "tf2onnx"] +tf-speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)"] timm = ["timm"] -tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"] -torch = ["accelerate (>=0.20.3)", "torch (>=1.9,!=1.12.0)"] -torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"] -torch-vision = ["Pillow (<10.0.0)", "torchvision"] -torchhub = ["filelock", "huggingface-hub (>=0.14.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"] -video = ["av (==9.2.0)", "decord (==0.6.0)"] -vision = ["Pillow (<10.0.0)"] +tokenizers = ["tokenizers (>=0.11.1,!=0.11.3)"] +torch = ["torch (>=1.0)"] +torch-speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)", "torchaudio"] +torchhub = ["filelock", "huggingface-hub (>=0.1.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex 
(!=2019.12.17)", "requests", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3)", "torch (>=1.0)", "tqdm (>=4.27)"] +vision = ["Pillow"] [[package]] name = "typer" @@ -6668,6 +6945,17 @@ files = [ {file = "types_cachetools-5.3.0.6-py3-none-any.whl", hash = "sha256:f7f8a25bfe306f2e6bc2ad0a2f949d9e72f2d91036d509c36d3810bf728bc6e1"}, ] +[[package]] +name = "types-google-cloud-ndb" +version = "2.2.0.1" +description = "Typing stubs for google-cloud-ndb" +optional = false +python-versions = "*" +files = [ + {file = "types-google-cloud-ndb-2.2.0.1.tar.gz", hash = "sha256:3dd5f7437d3f4192a32b69e9b21da582d897303680f2eaac5d070749fd866586"}, + {file = "types_google_cloud_ndb-2.2.0.1-py3-none-any.whl", hash = "sha256:88b4800f4c6421a34894bd9f61aee562605d5cc151d454d4d97241fd680b1cb1"}, +] + [[package]] name = "types-passlib" version = "1.7.7.13" @@ -6701,6 +6989,20 @@ files = [ {file = "types_pyasn1-0.4.0.6-py3-none-any.whl", hash = "sha256:dd5fc818864e63a66cd714be0a7a59a493f4a81b87ee9ac978c41f1eaa9a0cef"}, ] +[[package]] +name = "types-pyopenssl" +version = "23.2.0.2" +description = "Typing stubs for pyOpenSSL" +optional = false +python-versions = "*" +files = [ + {file = "types-pyOpenSSL-23.2.0.2.tar.gz", hash = "sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d"}, + {file = "types_pyOpenSSL-23.2.0.2-py3-none-any.whl", hash = "sha256:19536aa3debfbe25a918cf0d898e9f5fbbe6f3594a429da7914bf331deb1b342"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" + [[package]] name = "types-python-jose" version = "3.3.4.8" @@ -6717,35 +7019,61 @@ types-pyasn1 = "*" [[package]] name = "types-pytz" -version = "2023.3.0.1" +version = "2023.3.1.1" description = "Typing stubs for pytz" optional = false python-versions = "*" files = [ - {file = "types-pytz-2023.3.0.1.tar.gz", hash = "sha256:1a7b8d4aac70981cfa24478a41eadfcd96a087c986d6f150d77e3ceb3c2bdfab"}, - {file = "types_pytz-2023.3.0.1-py3-none-any.whl", hash = 
"sha256:65152e872137926bb67a8fe6cc9cfd794365df86650c5d5fdc7b167b0f38892e"}, + {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, + {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"}, +] + +[[package]] +name = "types-pywin32" +version = "306.0.0.4" +description = "Typing stubs for pywin32" +optional = false +python-versions = "*" +files = [ + {file = "types-pywin32-306.0.0.4.tar.gz", hash = "sha256:ae4bbec80d535053236d4bebedf55f58dee89cf5883d277f0fa89e857f3ff337"}, + {file = "types_pywin32-306.0.0.4-py3-none-any.whl", hash = "sha256:f76a343ed6933008af85e158063963f923e54f2f461e697b2929b4178c7b77a1"}, ] [[package]] name = "types-pyyaml" -version = "6.0.12.11" +version = "6.0.12.12" description = "Typing stubs for PyYAML" optional = false python-versions = "*" files = [ - {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, - {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, + {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, + {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, ] +[[package]] +name = "types-redis" +version = "4.6.0.7" +description = "Typing stubs for redis" +optional = false +python-versions = "*" +files = [ + {file = "types-redis-4.6.0.7.tar.gz", hash = "sha256:28c4153ddb5c9d4f10def44a2454673c361d2d5fc3cd867cf3bb1520f3f59a38"}, + {file = "types_redis-4.6.0.7-py3-none-any.whl", hash = "sha256:05b1bf92879b25df20433fa1af07784a0d7928c616dc2ebf9087618db77ccbd0"}, +] + +[package.dependencies] +cryptography = ">=35.0.0" +types-pyOpenSSL = "*" + [[package]] name = "types-requests" -version = "2.31.0.2" 
+version = "2.31.0.5" description = "Typing stubs for requests" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, - {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, + {file = "types-requests-2.31.0.5.tar.gz", hash = "sha256:e4153c2a4e48dcc661600fa5f199b483cdcbd21965de0b5e2df26e93343c0f57"}, + {file = "types_requests-2.31.0.5-py3-none-any.whl", hash = "sha256:e2523825754b2832e04cdc1e731423390e731457890113a201ebca8ad9b40427"}, ] [package.dependencies] @@ -6764,13 +7092,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -6799,66 +7127,74 @@ files = [ {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, ] -[[package]] -name = "uc-micro-py" -version = "1.0.2" -description = "Micro subset of unicode data files for linkify-it-py projects." 
-optional = true -python-versions = ">=3.7" -files = [ - {file = "uc-micro-py-1.0.2.tar.gz", hash = "sha256:30ae2ac9c49f39ac6dce743bd187fcd2b574b16ca095fa74cd9396795c954c54"}, - {file = "uc_micro_py-1.0.2-py3-none-any.whl", hash = "sha256:8c9110c309db9d9e87302e2f4ad2c3152770930d88ab385cd544e7a7e75f3de0"}, -] - -[package.extras] -test = ["coverage", "pytest", "pytest-cov"] - [[package]] name = "unstructured" -version = "0.7.12" +version = "0.10.16" description = "A library that prepares raw documents for downstream ML tasks." optional = false python-versions = ">=3.7.0" files = [ - {file = "unstructured-0.7.12-py3-none-any.whl", hash = "sha256:6dec4f23574e213f30bccb680a4fb84c95617092ce4abf5d8955cc71af402fef"}, - {file = "unstructured-0.7.12.tar.gz", hash = "sha256:3dcddea34f52e1070f38fd10063b3b0f64bc4cbe5b778d6b86b5d33262d625cd"}, + {file = "unstructured-0.10.16-py3-none-any.whl", hash = "sha256:f654af8f22027797a570f2de74b61d136868e2c57cc87febbe8104434c461a3e"}, + {file = "unstructured-0.10.16.tar.gz", hash = "sha256:d01fae8ff72f996b0448aab20fc616909d679fdfee38e33e0feb8ac5103eecc2"}, ] [package.dependencies] -argilla = "*" +beautifulsoup4 = "*" chardet = "*" +dataclasses-json = "*" +emoji = "*" filetype = "*" lxml = "*" -markdown = "*" -msg-parser = "*" nltk = "*" -openpyxl = "*" -pandas = "*" -pdf2image = "*" -"pdfminer.six" = "*" -pillow = "*" -pypandoc = "*" -python-docx = "*" +python-iso639 = "*" python-magic = "*" -python-pptx = "*" requests = "*" tabulate = "*" -xlrd = "*" [package.extras] -azure = ["adlfs", "fsspec"] +airtable = ["pyairtable"] +all-docs = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +azure = ["adlfs", "fsspec (==2023.9.1)"] +biomed = ["bs4"] +box = ["boxfs", "fsspec (==2023.9.1)"] +confluence = ["atlassian-python-api"] +csv = ["pandas"] +delta-table = ["deltalake", "fsspec 
(==2023.9.1)"] discord = ["discord-py"] -dropbox = ["dropboxdrivefs", "fsspec"] -gcs = ["fsspec", "gcsfs"] -github = ["pygithub (==1.58.2)"] +doc = ["python-docx"] +docx = ["python-docx"] +dropbox = ["dropboxdrivefs", "fsspec (==2023.9.1)"] +elasticsearch = ["elasticsearch", "jq"] +epub = ["ebooklib"] +gcs = ["bs4", "fsspec (==2023.9.1)", "gcsfs"] +github = ["pygithub (>1.58.0)"] gitlab = ["python-gitlab"] google-drive = ["google-api-python-client"] huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"] -local-inference = ["unstructured-inference (==0.5.4)"] +image = ["pdf2image", "pdfminer.six", "unstructured-inference", "unstructured.pytesseract (>=0.3.12)"] +jira = ["atlassian-python-api"] +local-inference = ["ebooklib", "markdown", "msg-parser", "openpyxl", "pandas", "pdf2image", "pdfminer.six", "pypandoc", "python-docx", "python-pptx (<=0.6.21)", "unstructured-inference", "unstructured.pytesseract (>=0.3.12)", "xlrd"] +md = ["markdown"] +msg = ["msg-parser"] +notion = ["htmlBuilder", "notion-client"] +odt = ["pypandoc", "python-docx"] +onedrive = ["Office365-REST-Python-Client (<2.4.3)", "bs4", "msal"] +org = ["pypandoc"] +outlook = ["Office365-REST-Python-Client (<2.4.3)", "msal"] +paddleocr = ["unstructured.paddleocr (==2.6.1.3)"] +pdf = ["pdf2image", "pdfminer.six", "unstructured-inference", "unstructured.pytesseract (>=0.3.12)"] +ppt = ["python-pptx (<=0.6.21)"] +pptx = ["python-pptx (<=0.6.21)"] reddit = ["praw"] -s3 = ["fsspec", "s3fs"] +rst = ["pypandoc"] +rtf = ["pypandoc"] +s3 = ["fsspec (==2023.9.1)", "s3fs"] +salesforce = ["simple-salesforce"] +sharepoint = ["Office365-REST-Python-Client (<2.4.3)", "msal"] slack = ["slack-sdk"] +tsv = ["pandas"] wikipedia = ["wikipedia"] +xlsx = ["openpyxl", "pandas", "xlrd"] [[package]] name = "uritemplate" @@ -6958,44 +7294,66 @@ test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "my [[package]] name = "validators" -version = "0.21.0" +version = "0.22.0" 
description = "Python Data Validation for Humansโ„ข" optional = false -python-versions = ">=3.8,<4.0" +python-versions = ">=3.8" files = [ - {file = "validators-0.21.0-py3-none-any.whl", hash = "sha256:3470db6f2384c49727ee319afa2e97aec3f8fad736faa6067e0fd7f9eaf2c551"}, - {file = "validators-0.21.0.tar.gz", hash = "sha256:245b98ab778ed9352a7269c6a8f6c2a839bed5b2a7e3e60273ce399d247dd4b3"}, + {file = "validators-0.22.0-py3-none-any.whl", hash = "sha256:61cf7d4a62bbae559f2e54aed3b000cea9ff3e2fdbe463f51179b92c58c9585a"}, + {file = "validators-0.22.0.tar.gz", hash = "sha256:77b2689b172eeeb600d9605ab86194641670cdb73b60afd577142a9397873370"}, +] + +[package.extras] +docs-offline = ["myst-parser (>=2.0.0)", "pypandoc-binary (>=1.11)", "sphinx (>=7.1.1)"] +docs-online = ["mkdocs (>=1.5.2)", "mkdocs-git-revision-date-localized-plugin (>=1.2.0)", "mkdocs-material (>=9.2.6)", "mkdocstrings[python] (>=0.22.0)", "pyaml (>=23.7.0)"] +hooks = ["pre-commit (>=3.3.3)"] +package = ["build (>=1.0.0)", "twine (>=4.0.2)"] +runner = ["tox (>=4.11.1)"] +sast = ["bandit[toml] (>=1.7.5)"] +testing = ["pytest (>=7.4.0)"] +tooling = ["black (>=23.7.0)", "pyright (>=1.1.325)", "ruff (>=0.0.287)"] +tooling-extras = ["pyaml (>=23.7.0)", "pypandoc-binary (>=1.11)", "pytest (>=7.4.0)"] + +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." +optional = true +python-versions = ">=3.6" +files = [ + {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, + {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] [[package]] name = "watchfiles" -version = "0.19.0" +version = "0.20.0" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false python-versions = ">=3.7" files = [ - {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"}, - {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"}, - {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"}, - {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"}, - {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"}, - {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"}, - {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"}, - {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", 
hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"}, - {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"}, - {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"}, - {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"}, + {file = "watchfiles-0.20.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:3796312bd3587e14926013612b23066912cf45a14af71cf2b20db1c12dadf4e9"}, + {file = "watchfiles-0.20.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:d0002d81c89a662b595645fb684a371b98ff90a9c7d8f8630c82f0fde8310458"}, + {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:570848706440373b4cd8017f3e850ae17f76dbdf1e9045fc79023b11e1afe490"}, + 
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a0351d20d03c6f7ad6b2e8a226a5efafb924c7755ee1e34f04c77c3682417fa"}, + {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:007dcc4a401093010b389c044e81172c8a2520dba257c88f8828b3d460c6bb38"}, + {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d82dbc1832da83e441d112069833eedd4cf583d983fb8dd666fbefbea9d99c0"}, + {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99f4c65fd2fce61a571b2a6fcf747d6868db0bef8a934e8ca235cc8533944d95"}, + {file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5392dd327a05f538c56edb1c6ebba6af91afc81b40822452342f6da54907bbdf"}, + {file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:08dc702529bb06a2b23859110c214db245455532da5eaea602921687cfcd23db"}, + {file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d4e66a857621584869cfbad87039e65dadd7119f0d9bb9dbc957e089e32c164"}, + {file = "watchfiles-0.20.0-cp37-abi3-win32.whl", hash = "sha256:a03d1e6feb7966b417f43c3e3783188167fd69c2063e86bad31e62c4ea794cc5"}, + {file = "watchfiles-0.20.0-cp37-abi3-win_amd64.whl", hash = "sha256:eccc8942bcdc7d638a01435d915b913255bbd66f018f1af051cd8afddb339ea3"}, + {file = "watchfiles-0.20.0-cp37-abi3-win_arm64.whl", hash = "sha256:b17d4176c49d207865630da5b59a91779468dd3e08692fe943064da260de2c7c"}, + {file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d97db179f7566dcf145c5179ddb2ae2a4450e3a634eb864b09ea04e68c252e8e"}, + {file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:835df2da7a5df5464c4a23b2d963e1a9d35afa422c83bf4ff4380b3114603644"}, + {file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:608cd94a8767f49521901aff9ae0c92cc8f5a24d528db7d6b0295290f9d41193"}, + {file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d1de8218874925bce7bb2ae9657efc504411528930d7a83f98b1749864f2ef"}, + {file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:13f995d5152a8ba4ed7c2bbbaeee4e11a5944defc7cacd0ccb4dcbdcfd78029a"}, + {file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b5c8d3be7b502f8c43a33c63166ada8828dbb0c6d49c8f9ce990a96de2f5a49"}, + {file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e43af4464daa08723c04b43cf978ab86cc55c684c16172622bdac64b34e36af0"}, + {file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d9e1f75c4f86c93d73b5bd1ebe667558357548f11b4f8af4e0e272f79413ce"}, + {file = "watchfiles-0.20.0.tar.gz", hash = "sha256:728575b6b94c90dd531514677201e8851708e6e4b5fe7028ac506a200b622019"}, ] [package.dependencies] @@ -7014,37 +7372,36 @@ files = [ [[package]] name = "weaviate-client" -version = "3.22.1" +version = "3.24.1" description = "A python native Weaviate client" optional = false python-versions = ">=3.8" files = [ - {file = "weaviate-client-3.22.1.tar.gz", hash = "sha256:aff61bd3f5d74df20a62328443e3aa9c860d5330fdfb19c4d8ddc44cb604032f"}, - {file = "weaviate_client-3.22.1-py3-none-any.whl", hash = "sha256:01843a4899a227300e570409e77628e9d1b28476313f94943c37aee3f75112e1"}, + {file = "weaviate-client-3.24.1.tar.gz", hash = "sha256:e073350c21bd4dca5c0eac5dd5d95fb474278c715aaf2041e382d86bb7518ac8"}, + {file = "weaviate_client-3.24.1-py3-none-any.whl", hash = "sha256:2179ea1093685f6f7cefbd19e2896eeb4b4c720954d953018c0853f775cdad8b"}, ] [package.dependencies] -authlib = ">=1.1.0" -requests = ">=2.28.0,<=2.31.0" -tqdm = ">=4.59.0,<5.0.0" -validators = ">=0.18.2,<=0.21.0" +authlib = ">=1.2.1,<2.0.0" +requests = ">=2.30.0,<3.0.0" 
+validators = ">=0.21.2,<1.0.0" [package.extras] grpc = ["grpcio", "grpcio-tools"] [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.6.3" description = "WebSocket client for Python with low level API options" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, + {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, ] [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] @@ -7238,33 +7595,6 @@ files = [ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] -[[package]] -name = "xlrd" -version = "2.0.1" -description = "Library for developers to extract data from Microsoft Excel (tm) .xls spreadsheet files" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "xlrd-2.0.1-py2.py3-none-any.whl", hash = "sha256:6a33ee89877bd9abc1158129f6e94be74e2679636b8a205b43b85206c3f0bbdd"}, - {file = "xlrd-2.0.1.tar.gz", hash = "sha256:f72f148f54442c6b056bf931dbc34f986fd0c3b0b6b5a58d013c9aef274d0c88"}, -] - -[package.extras] -build = ["twine", "wheel"] -docs = ["sphinx"] -test = ["pytest", "pytest-cov"] - -[[package]] -name = "xlsxwriter" -version = "3.1.2" -description = "A Python module for creating Excel XLSX files." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "XlsxWriter-3.1.2-py3-none-any.whl", hash = "sha256:331508ff39d610ecdaf979e458840bc1eab6e6a02cfd5d08f044f0f73636236f"}, - {file = "XlsxWriter-3.1.2.tar.gz", hash = "sha256:78751099a770273f1c98b8d6643351f68f98ae8e6acf9d09d37dc6798f8cd3de"}, -] - [[package]] name = "yarl" version = "1.9.2" @@ -7369,19 +7699,84 @@ pydantic = ">=1.10.7" [[package]] name = "zipp" -version = "3.16.2" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +[[package]] +name = "zope-event" +version = "5.0" +description = "Very basic event publishing system" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, + {file = "zope.event-5.0.tar.gz", hash = 
"sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx"] +test = ["zope.testrunner"] + +[[package]] +name = "zope-interface" +version = "6.0" +description = "Interfaces for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, + {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, + {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, + {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, + {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, + {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, + {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, + {file = "zope.interface-6.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:790c1d9d8f9c92819c31ea660cd43c3d5451df1df61e2e814a6f99cebb292788"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b39b8711578dcfd45fc0140993403b8a81e879ec25d53189f3faa1f006087dca"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eba51599370c87088d8882ab74f637de0c4f04a6d08a312dce49368ba9ed5c2a"}, + {file = "zope.interface-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee934f023f875ec2cfd2b05a937bd817efcc6c4c3f55c5778cbf78e58362ddc"}, + {file = "zope.interface-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:042f2381118b093714081fd82c98e3b189b68db38ee7d35b63c327c470ef8373"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dfbbbf0809a3606046a41f8561c3eada9db811be94138f42d9135a5c47e75f6f"}, + {file = "zope.interface-6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:424d23b97fa1542d7be882eae0c0fc3d6827784105264a8169a26ce16db260d8"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538f2d4a6ffb6edfb303ce70ae7e88629ac6e5581870e66c306d9ad7b564a58"}, + {file = "zope.interface-6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12175ca6b4db7621aedd7c30aa7cfa0a2d65ea3a0105393e05482d7a2d367446"}, + {file = 
"zope.interface-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3d7dfd897a588ec27e391edbe3dd320a03684457470415870254e714126b1f"}, + {file = "zope.interface-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b3f543ae9d3408549a9900720f18c0194ac0fe810cecda2a584fd4dca2eb3bb8"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, + {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, + {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, + {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, + {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, +] + +[package.dependencies] +setuptools = "*" + +[package.extras] +docs = ["Sphinx", "repoze.sphinx.autointerface"] +test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] + [[package]] name = "zstandard" version = "0.21.0" @@ -7442,7 +7837,7 @@ cffi = ["cffi (>=1.11)"] [extras] all = [] -deploy = ["langchain-serve"] +deploy = ["celery", "flower", "redis"] local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] diff 
--git a/pyproject.toml b/pyproject.toml index f873554c4..f24de3e82 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.4.11" +version = "0.5.0a1" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ @@ -25,6 +25,7 @@ documentation = "https://docs.langflow.org" langflow = "langflow.__main__:main" [tool.poetry.dependencies] + python = ">=3.9,<3.11" fastapi = "^0.100.0" uvicorn = "^0.22.0" @@ -32,17 +33,17 @@ beautifulsoup4 = "^4.12.2" google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.9.0" -gunicorn = "^21.1.0" -langchain = "^0.0.268" +gunicorn = "^21.2.0" +langchain = "^0.0.303" openai = "^0.27.8" -pandas = "^2.0.0" +pandas = "2.0.3" chromadb = "^0.3.21" huggingface-hub = { version = "^0.16.0", extras = ["inference"] } -rich = "^13.4.2" +rich = "^13.5.0" llama-cpp-python = { version = "~0.1.0", optional = true } networkx = "^3.1" -unstructured = "^0.7.0" -pypdf = "^3.11.0" +unstructured = "^0.10.0" +pypdf = "^3.15.0" lxml = "^4.9.2" pysrt = "^1.1.2" fake-useragent = "^1.2.1" @@ -51,14 +52,13 @@ psycopg2-binary = "^2.9.6" pyarrow = "^12.0.0" tiktoken = "~0.4.0" wikipedia = "^1.4.0" -langchain-serve = { version = ">0.0.51", optional = true } -qdrant-client = "^1.3.0" +qdrant-client = "^1.4.0" websockets = "^10.3" -weaviate-client = "^3.21.0" +weaviate-client = "^3.23.0" jina = "3.15.2" sentence-transformers = { version = "^2.2.2", optional = true } ctransformers = { version = "^0.2.10", optional = true } -cohere = "^4.11.0" +cohere = "^4.21.0" python-multipart = "^0.0.6" # install sqlmodel using https://github.com/honglei/sqlmodel.git sqlmodel = { git = "https://github.com/honglei/sqlmodel.git", branch = "main" } @@ -78,15 +78,26 @@ psycopg = "^3.1.9" psycopg-binary = "^3.1.9" fastavro = "^1.8.0" langchain-experimental = "^0.0.8" +celery = { extras = ["redis"], version = "^5.3.1", optional = true } +redis = { version = "^4.6.0", 
optional = true } +flower = { version = "^2.0.0", optional = true } alembic = "^1.11.2" passlib = "^1.7.4" bcrypt = "^4.0.1" python-jose = "^3.3.0" metaphor-python = "^0.1.11" pydantic-settings = "^2.0.3" -zep-python = {version = "^1.1.0a0", allow-prereleases = true} +zep-python = { version = "^1.1.0a0", allow-prereleases = true } +pywin32 = { version = "^306", markers = "sys_platform == 'win32'" } +loguru = "^0.7.1" +langfuse = "^1.0.13" +pillow = "^10.0.0" +metal-sdk = "^2.0.2" +markupsafe = "^2.1.3" + [tool.poetry.group.dev.dependencies] +types-redis = "^4.6.0.5" black = "^23.1.0" ipykernel = "^6.21.2" mypy = "^1.1.1" @@ -102,10 +113,16 @@ types-appdirs = "^1.4.3.5" types-pyyaml = "^6.0.12.8" types-python-jose = "^3.3.4.8" types-passlib = "^1.7.7.13" +locust = "^2.16.1" +pytest-mock = "^3.11.1" +pytest-xdist = "^3.3.1" +types-pywin32 = "^306.0.0.4" +types-google-cloud-ndb = "^2.2.0.0" +pytest-sugar = "^0.9.7" [tool.poetry.extras] -deploy = ["langchain-serve"] +deploy = ["langchain-serve", "celery", "redis", "flower"] local = ["llama-cpp-python", "sentence-transformers", "ctransformers"] all = ["deploy", "local"] @@ -117,6 +134,7 @@ testpaths = ["tests", "integration"] console_output_style = "progress" filterwarnings = ["ignore::DeprecationWarning"] log_cli = true +markers = ["async_test"] [tool.ruff] diff --git a/render.yaml b/render.yaml index e67da9334..4c17923c6 100644 --- a/render.yaml +++ b/render.yaml @@ -3,9 +3,14 @@ services: - type: web name: langflow runtime: docker - plan: free dockerfilePath: ./Dockerfile repo: https://github.com/logspace-ai/langflow branch: main healthCheckPath: /health autoDeploy: false + envVars: + - key: LANGFLOW_DATABASE_URL + value: sqlite:////home/user/.cache/langflow/langflow.db + disk: + name: langflow-data + mountPath: /home/user/.cache/langflow diff --git a/scripts/deploy_langflow_gcp.sh b/scripts/gcp/deploy_langflow_gcp.sh similarity index 100% rename from scripts/deploy_langflow_gcp.sh rename to 
scripts/gcp/deploy_langflow_gcp.sh diff --git a/scripts/deploy_langflow_gcp_spot.sh b/scripts/gcp/deploy_langflow_gcp_spot.sh similarity index 100% rename from scripts/deploy_langflow_gcp_spot.sh rename to scripts/gcp/deploy_langflow_gcp_spot.sh diff --git a/scripts/walkthroughtutorial.md b/scripts/gcp/walkthroughtutorial.md similarity index 100% rename from scripts/walkthroughtutorial.md rename to scripts/gcp/walkthroughtutorial.md diff --git a/scripts/walkthroughtutorial_spot.md b/scripts/gcp/walkthroughtutorial_spot.md similarity index 100% rename from scripts/walkthroughtutorial_spot.md rename to scripts/gcp/walkthroughtutorial_spot.md diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py index f6eb836cc..d3afbb4af 100644 --- a/src/backend/langflow/__init__.py +++ b/src/backend/langflow/__init__.py @@ -1,7 +1,7 @@ from importlib import metadata # Deactivate cache manager for now -# from langflow.services.cache import cache_manager +# from langflow.services.cache import cache_service from langflow.processing.process import load_flow_from_json from langflow.interface.custom.custom_component import CustomComponent @@ -12,4 +12,4 @@ except metadata.PackageNotFoundError: __version__ = "" del metadata # optional, avoids polluting the results of dir(__package__) -__all__ = ["load_flow_from_json", "cache_manager", "CustomComponent"] +__all__ = ["load_flow_from_json", "cache_service", "CustomComponent"] diff --git a/src/backend/langflow/__main__.py b/src/backend/langflow/__main__.py index c86e9863f..f42f44a9e 100644 --- a/src/backend/langflow/__main__.py +++ b/src/backend/langflow/__main__.py @@ -1,29 +1,60 @@ +import platform +import socket import sys import time -import httpx -from langflow.services.manager import initialize_settings_manager -from langflow.services.utils import get_settings_manager -from langflow.utils.util import get_number_of_workers -from multiprocess import Process # type: ignore -import platform +import webbrowser from 
pathlib import Path from typing import Optional -import socket -from rich.panel import Panel + +import httpx +import typer +from dotenv import load_dotenv +from langflow.main import setup_app +from langflow.services.database.utils import session_getter +from langflow.services.getters import get_db_service, get_settings_service +from langflow.services.utils import initialize_services, initialize_settings_service +from langflow.utils.logger import configure, logger +from multiprocess import Process, cpu_count # type: ignore from rich import box from rich import print as rprint -import typer -from langflow.main import setup_app -from langflow.utils.logger import configure, logger -import webbrowser -from dotenv import load_dotenv +from rich.console import Console +from rich.panel import Panel +from rich.table import Table -app = typer.Typer() +console = Console() + +app = typer.Typer(no_args_is_help=True) + + +def get_number_of_workers(workers=None): + if workers == -1 or workers is None: + workers = (cpu_count() * 2) + 1 + logger.debug(f"Number of workers: {workers}") + return workers + + +def display_results(results): + """ + Display the results of the migration. 
+ """ + for table_results in results: + table = Table(title=f"Migration {table_results.table_name}") + table.add_column("Name") + table.add_column("Type") + table.add_column("Status") + + for result in table_results.results: + status = "Success" if result.success else "Failure" + color = "green" if result.success else "red" + table.add_row(result.name, result.type, f"[{color}]{status}[/{color}]") + + console.print(table) + console.print() # Print a new line def update_settings( config: str, - cache: str, + cache: Optional[str] = None, dev: bool = False, remove_api_keys: bool = False, components_path: Optional[Path] = None, @@ -31,70 +62,24 @@ def update_settings( """Update the settings from a config file.""" # Check for database_url in the environment variables - initialize_settings_manager() - settings_manager = get_settings_manager() + initialize_settings_service() + settings_service = get_settings_service() if config: logger.debug(f"Loading settings from {config}") - settings_manager.settings.update_from_yaml(config, dev=dev) + settings_service.settings.update_from_yaml(config, dev=dev) if remove_api_keys: logger.debug(f"Setting remove_api_keys to {remove_api_keys}") - settings_manager.settings.update_settings(REMOVE_API_KEYS=remove_api_keys) + settings_service.settings.update_settings(REMOVE_API_KEYS=remove_api_keys) if cache: logger.debug(f"Setting cache to {cache}") - settings_manager.settings.update_settings(CACHE=cache) + settings_service.settings.update_settings(CACHE=cache) if components_path: logger.debug(f"Adding component path {components_path}") - settings_manager.settings.update_settings(COMPONENTS_PATH=components_path) - - -def serve_on_jcloud(): - """ - Deploy Langflow server on Jina AI Cloud - """ - import asyncio - from importlib.metadata import version as mod_version - - import click - - try: - from lcserve.__main__ import serve_on_jcloud # type: ignore - except ImportError: - click.secho( - "๐Ÿšจ Please install langchain-serve to deploy 
Langflow server on Jina AI Cloud " - "using `pip install langchain-serve`", - fg="red", - ) - return - - app_name = "langflow.lcserve:app" - app_dir = str(Path(__file__).parent) - version = mod_version("langflow") - base_image = "jinaai+docker://deepankarm/langflow" - - click.echo("๐Ÿš€ Deploying Langflow server on Jina AI Cloud") - app_id = asyncio.run( - serve_on_jcloud( - fastapi_app_str=app_name, - app_dir=app_dir, - uses=f"{base_image}:{version}", - name="langflow", - ) - ) - click.secho( - "๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰", fg="green" - ) - click.secho( - "๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): ", - nl=False, - fg="green", - ) - click.secho(f"https://{app_id}.wolf.jina.ai/", fg="blue") - click.secho("๐Ÿ“– Read more about managing the server: ", nl=False, fg="green") - click.secho("https://github.com/jina-ai/langchain-serve", fg="blue") + settings_service.settings.update_settings(COMPONENTS_PATH=components_path) @app.command() -def serve( +def run( host: str = typer.Option( "127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST" ), @@ -121,12 +106,11 @@ def serve( log_file: Path = typer.Option( "logs/langflow.log", help="Path to the log file.", envvar="LANGFLOW_LOG_FILE" ), - cache: str = typer.Option( + cache: Optional[str] = typer.Option( envvar="LANGFLOW_LANGCHAIN_CACHE", help="Type of cache to use. (InMemoryCache, SQLiteCache)", - default="SQLiteCache", + default=None, ), - jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"), dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"), # This variable does not work but is set by the .env file # and works with Pydantic @@ -157,15 +141,12 @@ def serve( ), ): """ - Run the Langflow server. + Run the Langflow. 
""" # override env variables with .env file if env_file: load_dotenv(env_file, override=True) - if jcloud: - return serve_on_jcloud() - configure(log_level=log_level, log_file=log_file) update_settings( config, @@ -312,6 +293,53 @@ def run_langflow(host, port, log_level, options, app): sys.exit(1) +@app.command() +def superuser( + username: str = typer.Option(..., prompt=True, help="Username for the superuser."), + password: str = typer.Option( + ..., prompt=True, hide_input=True, help="Password for the superuser." + ), + log_level: str = typer.Option( + "critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL" + ), +): + """ + Create a superuser. + """ + configure(log_level=log_level) + initialize_services() + db_service = get_db_service() + with session_getter(db_service) as session: + from langflow.services.auth.utils import create_super_user + + if create_super_user(db=session, username=username, password=password): + # Verify that the superuser was created + from langflow.services.database.models.user.user import User + + user: User = session.query(User).filter(User.username == username).first() + if user is None or not user.is_superuser: + typer.echo("Superuser creation failed.") + return + + typer.echo("Superuser created successfully.") + + else: + typer.echo("Superuser creation failed.") + + +@app.command() +def migration(test: bool = typer.Option(True, help="Run migrations in test mode.")): + """ + Run or test migrations. 
+ """ + initialize_services() + db_service = get_db_service() + if not test: + db_service.run_migrations() + results = db_service.run_migrations_test() + display_results(results) + + def main(): app() diff --git a/src/backend/langflow/alembic/env.py b/src/backend/langflow/alembic/env.py index 310894431..e606036f1 100644 --- a/src/backend/langflow/alembic/env.py +++ b/src/backend/langflow/alembic/env.py @@ -46,6 +46,7 @@ def run_migrations_offline() -> None: target_metadata=target_metadata, literal_binds=True, dialect_opts={"paramstyle": "named"}, + render_as_batch=True, ) with context.begin_transaction(): @@ -66,7 +67,9 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure(connection=connection, target_metadata=target_metadata) + context.configure( + connection=connection, target_metadata=target_metadata, render_as_batch=True + ) with context.begin_transaction(): context.run_migrations() diff --git a/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py b/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py deleted file mode 100644 index 0100df44d..000000000 --- a/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Remove FlowStyles table - -Revision ID: 0a534bdfd84b -Revises: 4814b6f4abfd -Create Date: 2023-08-07 14:09:06.844104 - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision: str = "0a534bdfd84b" -down_revision: Union[str, None] = "4814b6f4abfd" -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.drop_table("flowstyle") - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.create_table( - "flowstyle", - sa.Column("color", sa.VARCHAR(), nullable=False), - sa.Column("emoji", sa.VARCHAR(), nullable=False), - sa.Column("flow_id", sa.CHAR(length=32), nullable=True), - sa.Column("id", sa.CHAR(length=32), nullable=False), - sa.ForeignKeyConstraint( - ["flow_id"], - ["flow.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("id"), - ) - # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/260dbcc8b680_adds_tables.py b/src/backend/langflow/alembic/versions/260dbcc8b680_adds_tables.py new file mode 100644 index 000000000..53e049a05 --- /dev/null +++ b/src/backend/langflow/alembic/versions/260dbcc8b680_adds_tables.py @@ -0,0 +1,177 @@ +"""Adds tables + +Revision ID: 260dbcc8b680 +Revises: +Create Date: 2023-08-27 19:49:02.681355 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "260dbcc8b680" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + # List existing tables + existing_tables = inspector.get_table_names() + # Drop 'flowstyle' table if it exists + # and other related indices + if "flowstyle" in existing_tables: + op.drop_table("flowstyle") + if "ix_flowstyle_flow_id" in [ + index["name"] for index in inspector.get_indexes("flowstyle") + ]: + op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle") + + existing_indices_flow = [] + existing_fks_flow = [] + if "flow" in existing_tables: + existing_indices_flow = [ + index["name"] for index in inspector.get_indexes("flow") + ] + # Existing foreign keys for the 'flow' table, if it exists + existing_fks_flow = [ + fk["referred_table"] + "." + fk["referred_columns"][0] + for fk in inspector.get_foreign_keys("flow") + ] + # Now check if the columns user_id exists in the 'flow' table + # If it does not exist, we need to create the foreign key + + if "user" not in existing_tables: + op.create_table( + "user", + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column("username", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("password", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("is_active", sa.Boolean(), nullable=False), + sa.Column("is_superuser", sa.Boolean(), nullable=False), + sa.Column("create_at", sa.DateTime(), nullable=False), + sa.Column("updated_at", sa.DateTime(), nullable=False), + sa.Column("last_login_at", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + with op.batch_alter_table("user", schema=None) as batch_op: + batch_op.create_index( + batch_op.f("ix_user_username"), ["username"], unique=True + ) + + if "apikey" not in existing_tables: + op.create_table( + "apikey", + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("last_used_at", sa.DateTime(), nullable=True), + 
sa.Column("total_uses", sa.Integer(), nullable=False, default=0), + sa.Column("is_active", sa.Boolean(), nullable=False, default=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + with op.batch_alter_table("apikey", schema=None) as batch_op: + batch_op.create_index( + batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True + ) + batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False) + batch_op.create_index( + batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False + ) + if "flow" not in existing_tables: + op.create_table( + "flow", + sa.Column("data", sa.JSON(), nullable=True), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + # Conditionally create indices for 'flow' table + # if _alembic_tmp_flow exists, then we need to drop it first + # This is to deal with SQLite not being able to ROLLBACK + # for some unknown reason + if "_alembic_tmp_flow" in existing_tables: + op.drop_table("_alembic_tmp_flow") + with op.batch_alter_table("flow", schema=None) as batch_op: + flow_columns = [col["name"] for col in inspector.get_columns("flow")] + if "user_id" not in flow_columns: + batch_op.add_column( + sa.Column( + "user_id", + sqlmodel.sql.sqltypes.GUID(), + nullable=True, # This should be False, but we need to allow NULL values for now + ) + ) + if "user.id" not in existing_fks_flow: + 
batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"]) + if "ix_flow_description" not in existing_indices_flow: + batch_op.create_index( + batch_op.f("ix_flow_description"), ["description"], unique=False + ) + if "ix_flow_name" not in existing_indices_flow: + batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False) + with op.batch_alter_table("flow", schema=None) as batch_op: + if "ix_flow_user_id" not in existing_indices_flow: + batch_op.create_index( + batch_op.f("ix_flow_user_id"), ["user_id"], unique=False + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + # List existing tables + existing_tables = inspector.get_table_names() + if "flow" in existing_tables: + with op.batch_alter_table("flow", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_flow_user_id")) + batch_op.drop_index(batch_op.f("ix_flow_name")) + batch_op.drop_index(batch_op.f("ix_flow_description")) + + op.drop_table("flow") + if "apikey" in existing_tables: + with op.batch_alter_table("apikey", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_apikey_user_id")) + batch_op.drop_index(batch_op.f("ix_apikey_name")) + batch_op.drop_index(batch_op.f("ix_apikey_api_key")) + + op.drop_table("apikey") + if "user" in existing_tables: + with op.batch_alter_table("user", schema=None) as batch_op: + batch_op.drop_index(batch_op.f("ix_user_username")) + + op.drop_table("user") + + if "flowstyle" in existing_tables: + op.drop_table("flowstyle") + + if "component" in existing_tables: + op.drop_table("component") + # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py b/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py deleted file mode 100644 index 0b2f32657..000000000 --- 
a/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py +++ /dev/null @@ -1,65 +0,0 @@ -"""Add Flow table - -Revision ID: 4814b6f4abfd -Revises: -Create Date: 2023-08-05 17:47:42.879824 - -""" - -import contextlib -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -import sqlmodel - - -# revision identifiers, used by Alembic. -revision: str = "4814b6f4abfd" -down_revision: Union[str, None] = None -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - - # This suppress is used to not break the migration if the table already exists. - with contextlib.suppress(sa.exc.OperationalError): - op.create_table( - "flow", - sa.Column("data", sa.JSON(), nullable=True), - sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True), - sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("id"), - ) - op.create_index( - op.f("ix_flow_description"), "flow", ["description"], unique=False - ) - op.create_index(op.f("ix_flow_name"), "flow", ["name"], unique=False) - with contextlib.suppress(sa.exc.OperationalError): - op.create_table( - "flowstyle", - sa.Column("color", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column("emoji", sqlmodel.sql.sqltypes.AutoString(), nullable=False), - sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), - sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), - sa.ForeignKeyConstraint( - ["flow_id"], - ["flow.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("id"), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_table("flowstyle") - op.drop_index(op.f("ix_flow_name"), table_name="flow") - op.drop_index(op.f("ix_flow_description"), table_name="flow") - op.drop_table("flow") - # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py b/src/backend/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py new file mode 100644 index 000000000..ca0aa0542 --- /dev/null +++ b/src/backend/langflow/alembic/versions/67cc006d50bf_add_profile_image_column.py @@ -0,0 +1,49 @@ +"""Add profile-image column + +Revision ID: 67cc006d50bf +Revises: 260dbcc8b680 +Create Date: 2023-09-08 07:36:13.387318 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel +from sqlalchemy.engine.reflection import Inspector + +# revision identifiers, used by Alembic. +revision: str = "67cc006d50bf" +down_revision: Union[str, None] = "260dbcc8b680" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + if "user" in inspector.get_table_names() and "profile_image" not in [ + column["name"] for column in inspector.get_columns("user") + ]: + with op.batch_alter_table("user", schema=None) as batch_op: + batch_op.add_column( + sa.Column( + "profile_image", sqlmodel.sql.sqltypes.AutoString(), nullable=True + ) + ) + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + conn = op.get_bind() + inspector = Inspector.from_engine(conn) + if "user" in inspector.get_table_names() and "profile_image" in [ + column["name"] for column in inspector.get_columns("user") + ]: + with op.batch_alter_table("user", schema=None) as batch_op: + batch_op.drop_column("profile_image") + + # ### end Alembic commands ### diff --git a/src/backend/langflow/api/router.py b/src/backend/langflow/api/router.py index ea1938a75..dbaf20e75 100644 --- a/src/backend/langflow/api/router.py +++ b/src/backend/langflow/api/router.py @@ -6,6 +6,9 @@ from langflow.api.v1 import ( validate_router, flows_router, component_router, + users_router, + api_key_router, + login_router, ) router = APIRouter( @@ -16,3 +19,6 @@ router.include_router(endpoints_router) router.include_router(validate_router) router.include_router(component_router) router.include_router(flows_router) +router.include_router(users_router) +router.include_router(api_key_router) +router.include_router(login_router) diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py index 0fb53e541..c519fffed 100644 --- a/src/backend/langflow/api/utils.py +++ b/src/backend/langflow/api/utils.py @@ -59,33 +59,6 @@ def build_input_keys_response(langchain_object, artifacts): return input_keys_response -def merge_nested_dicts(dict1, dict2): - for key, value in dict2.items(): - if isinstance(value, dict) and isinstance(dict1.get(key), dict): - dict1[key] = merge_nested_dicts(dict1[key], value) - else: - dict1[key] = value - return dict1 - - -def merge_nested_dicts_with_renaming(dict1, dict2): - for key, value in dict2.items(): - if ( - key in dict1 - and isinstance(value, dict) - and isinstance(dict1.get(key), dict) - ): - for sub_key, sub_value in value.items(): - if sub_key in dict1[key]: - new_key = get_new_key(dict1[key], sub_key) - dict1[key][new_key] = sub_value - else: - dict1[key][sub_key] = sub_value - else: - dict1[key] = value - return dict1 - - def get_new_key(dictionary, 
original_key): counter = 1 new_key = original_key + " (" + str(counter) + ")" diff --git a/src/backend/langflow/api/v1/__init__.py b/src/backend/langflow/api/v1/__init__.py index b6e7b36d8..9335a4607 100644 --- a/src/backend/langflow/api/v1/__init__.py +++ b/src/backend/langflow/api/v1/__init__.py @@ -3,6 +3,9 @@ from langflow.api.v1.validate import router as validate_router from langflow.api.v1.chat import router as chat_router from langflow.api.v1.flows import router as flows_router from langflow.api.v1.components import router as component_router +from langflow.api.v1.users import router as users_router +from langflow.api.v1.api_key import router as api_key_router +from langflow.api.v1.login import router as login_router __all__ = [ "chat_router", @@ -10,4 +13,7 @@ __all__ = [ "component_router", "validate_router", "flows_router", + "users_router", + "api_key_router", + "login_router", ] diff --git a/src/backend/langflow/api/v1/api_key.py b/src/backend/langflow/api/v1/api_key.py new file mode 100644 index 000000000..7f5916d06 --- /dev/null +++ b/src/backend/langflow/api/v1/api_key.py @@ -0,0 +1,61 @@ +from uuid import UUID +from fastapi import APIRouter, HTTPException, Depends +from langflow.api.v1.schemas import ApiKeysResponse +from langflow.services.auth.utils import get_current_active_user +from langflow.services.database.models.api_key.api_key import ( + ApiKeyCreate, + UnmaskedApiKeyRead, +) + +# Assuming you have these methods in your service layer +from langflow.services.database.models.api_key.crud import ( + get_api_keys, + create_api_key, + delete_api_key, +) +from langflow.services.database.models.user.user import User +from langflow.services.getters import get_session +from sqlmodel import Session + + +router = APIRouter(tags=["APIKey"], prefix="/api_key") + + +@router.get("/", response_model=ApiKeysResponse) +def get_api_keys_route( + db: Session = Depends(get_session), + current_user: User = Depends(get_current_active_user), +): + try: + user_id = 
current_user.id + keys = get_api_keys(db, user_id) + + return ApiKeysResponse(total_count=len(keys), user_id=user_id, api_keys=keys) + except Exception as exc: + raise HTTPException(status_code=400, detail=str(exc)) from exc + + +@router.post("/", response_model=UnmaskedApiKeyRead) +def create_api_key_route( + req: ApiKeyCreate, + current_user: User = Depends(get_current_active_user), + db: Session = Depends(get_session), +): + try: + user_id = current_user.id + return create_api_key(db, req, user_id=user_id) + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) from e + + +@router.delete("/{api_key_id}") +def delete_api_key_route( + api_key_id: UUID, + current_user=Depends(get_current_active_user), + db: Session = Depends(get_session), +): + try: + delete_api_key(db, api_key_id) + return {"detail": "API Key deleted"} + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) from e diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py index 0a2087e37..af64ccaf3 100644 --- a/src/backend/langflow/api/v1/base.py +++ b/src/backend/langflow/api/v1/base.py @@ -1,3 +1,4 @@ +from typing import Optional from langflow.template.frontend_node.base import FrontendNode from pydantic import field_validator, BaseModel @@ -20,7 +21,8 @@ class FrontendNodeRequest(FrontendNode): class ValidatePromptRequest(BaseModel): name: str template: str - frontend_node: FrontendNodeRequest + # optional for tweak call + frontend_node: Optional[FrontendNodeRequest] = None # Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}} @@ -41,7 +43,8 @@ class CodeValidationResponse(BaseModel): class PromptValidationResponse(BaseModel): input_variables: list - frontend_node: FrontendNodeRequest + # object return for tweak call + frontend_node: Optional[FrontendNodeRequest] = None INVALID_CHARACTERS = { diff --git a/src/backend/langflow/api/v1/callback.py 
b/src/backend/langflow/api/v1/callback.py index 69dbf5082..2a16a0bd2 100644 --- a/src/backend/langflow/api/v1/callback.py +++ b/src/backend/langflow/api/v1/callback.py @@ -10,7 +10,7 @@ from fastapi import WebSocket from langchain.schema import AgentAction, LLMResult, AgentFinish -from langflow.utils.logger import logger +from loguru import logger # https://github.com/hwchase17/chat-langchain/blob/master/callback.py diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index 611407e8d..c25c4bde2 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -1,59 +1,99 @@ -from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status +from fastapi import ( + APIRouter, + Depends, + HTTPException, + Query, + WebSocket, + WebSocketException, + status, +) from fastapi.responses import StreamingResponse from langflow.api.utils import build_input_keys_response from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData -from langflow.services import service_manager, ServiceType from langflow.graph.graph.base import Graph -from langflow.utils.logger import logger -from cachetools import LRUCache +from langflow.services.auth.utils import get_current_active_user, get_current_user +from langflow.services.cache.utils import update_build_status +from loguru import logger +from langflow.services.getters import get_chat_service, get_session, get_cache_service +from sqlmodel import Session +from langflow.services.chat.manager import ChatService +from langflow.services.cache.manager import BaseCacheService + router = APIRouter(tags=["Chat"]) -flow_data_store: LRUCache = LRUCache(maxsize=10) - @router.websocket("/chat/{client_id}") -async def chat(client_id: str, websocket: WebSocket): +async def chat( + client_id: str, + websocket: WebSocket, + token: str = Query(...), + db: Session = Depends(get_session), + chat_service: "ChatService" = Depends(get_chat_service), 
+): """Websocket endpoint for chat.""" try: - chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) - if client_id in chat_manager.in_memory_cache: - await chat_manager.handle_websocket(client_id, websocket) + await websocket.accept() + user = await get_current_user(token, db) + if not user: + await websocket.close( + code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized" + ) + if not user.is_active: + await websocket.close( + code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized" + ) + + if client_id in chat_service.cache_service: + await chat_service.handle_websocket(client_id, websocket) else: # We accept the connection but close it immediately # if the flow is not built yet - await websocket.accept() message = "Please, build the flow before sending messages" await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message) except WebSocketException as exc: - logger.error(f"Websocket error: {exc}") + logger.error(f"Websocket exrror: {exc}") await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc)) + except Exception as exc: + logger.error(f"Error in chat websocket: {exc}") + messsage = exc.detail if isinstance(exc, HTTPException) else str(exc) + if "Could not validate credentials" in str(exc): + await websocket.close( + code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized" + ) + else: + await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=messsage) @router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201) -async def init_build(graph_data: dict, flow_id: str): +async def init_build( + graph_data: dict, + flow_id: str, + current_user=Depends(get_current_active_user), + chat_service: "ChatService" = Depends(get_chat_service), + cache_service: "BaseCacheService" = Depends(get_cache_service), +): """Initialize the build by storing graph data and returning a unique session ID.""" - try: if flow_id is None: raise ValueError("No ID provided") # Check if already building if ( - flow_id 
in flow_data_store - and flow_data_store[flow_id]["status"] == BuildStatus.IN_PROGRESS + flow_id in cache_service + and isinstance(cache_service[flow_id], dict) + and cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS ): return InitResponse(flowId=flow_id) # Delete from cache if already exists - chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) - if flow_id in chat_manager.in_memory_cache: - with chat_manager.in_memory_cache._lock: - chat_manager.in_memory_cache.delete(flow_id) - logger.debug(f"Deleted flow {flow_id} from cache") - flow_data_store[flow_id] = { + if flow_id in chat_service.cache_service: + chat_service.cache_service.delete(flow_id) + logger.debug(f"Deleted flow {flow_id} from cache") + cache_service[flow_id] = { "graph_data": graph_data, "status": BuildStatus.STARTED, + "user_id": current_user.id, } return InitResponse(flowId=flow_id) @@ -63,12 +103,14 @@ async def init_build(graph_data: dict, flow_id: str): @router.get("/build/{flow_id}/status", response_model=BuiltResponse) -async def build_status(flow_id: str): - """Check the flow_id is in the flow_data_store.""" +async def build_status( + flow_id: str, cache_service: "BaseCacheService" = Depends(get_cache_service) +): + """Check the flow_id is in the cache_service.""" try: built = ( - flow_id in flow_data_store - and flow_data_store[flow_id]["status"] == BuildStatus.SUCCESS + flow_id in cache_service + and cache_service[flow_id]["status"] == BuildStatus.SUCCESS ) return BuiltResponse( @@ -81,24 +123,29 @@ async def build_status(flow_id: str): @router.get("/build/stream/{flow_id}", response_class=StreamingResponse) -async def stream_build(flow_id: str): +async def stream_build( + flow_id: str, + chat_service: "ChatService" = Depends(get_chat_service), + cache_service: "BaseCacheService" = Depends(get_cache_service), +): """Stream the build process based on stored flow data.""" async def event_stream(flow_id): final_response = {"end_of_stream": True} artifacts = {} try: - 
if flow_id not in flow_data_store: + if flow_id not in cache_service: error_message = "Invalid session ID" yield str(StreamData(event="error", data={"error": error_message})) return - if flow_data_store[flow_id].get("status") == BuildStatus.IN_PROGRESS: + if cache_service[flow_id].get("status") == BuildStatus.IN_PROGRESS: error_message = "Already building" yield str(StreamData(event="error", data={"error": error_message})) return - graph_data = flow_data_store[flow_id].get("graph_data") + graph_data = cache_service[flow_id].get("graph_data") + cache_service[flow_id]["user_id"] if not graph_data: error_message = "No data provided" @@ -106,17 +153,12 @@ async def stream_build(flow_id: str): return logger.debug("Building langchain object") - try: - # Some error could happen when building the graph - graph = Graph.from_payload(graph_data) - except Exception as exc: - logger.exception(exc) - error_message = str(exc) - yield str(StreamData(event="error", data={"error": error_message})) - return + + # Some error could happen when building the graph + graph = Graph.from_payload(graph_data) number_of_nodes = len(graph.nodes) - flow_data_store[flow_id]["status"] = BuildStatus.IN_PROGRESS + update_build_status(cache_service, flow_id, BuildStatus.IN_PROGRESS) for i, vertex in enumerate(graph.generator_build(), 1): try: @@ -124,11 +166,16 @@ async def stream_build(flow_id: str): "log": f"Building node {vertex.vertex_type}", } yield str(StreamData(event="log", data=log_dict)) - vertex.build() + if vertex.is_task: + vertex = try_running_celery_task(vertex) + else: + vertex.build() params = vertex._built_object_repr() valid = True logger.debug(f"Building node {str(vertex.vertex_type)}") - logger.debug(f"Output: {params}") + logger.debug( + f"Output: {params[:100]}{'...' 
if len(params) > 100 else ''}" + ) if vertex.artifacts: # The artifacts will be prompt variables # passed to build_input_keys_response @@ -138,7 +185,7 @@ async def stream_build(flow_id: str): logger.exception(exc) params = str(exc) valid = False - flow_data_store[flow_id]["status"] = BuildStatus.FAILURE + update_build_status(cache_service, flow_id, BuildStatus.FAILURE) response = { "valid": valid, @@ -162,15 +209,15 @@ async def stream_build(flow_id: str): "handle_keys": [], } yield str(StreamData(event="message", data=input_keys_response)) - chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) - chat_manager.set_cache(flow_id, langchain_object) + chat_service.set_cache(flow_id, langchain_object) # We need to reset the chat history - chat_manager.chat_history.empty_history(flow_id) - flow_data_store[flow_id]["status"] = BuildStatus.SUCCESS + chat_service.chat_history.empty_history(flow_id) + update_build_status(cache_service, flow_id, BuildStatus.SUCCESS) except Exception as exc: logger.exception(exc) logger.error("Error while building the flow: %s", exc) - flow_data_store[flow_id]["status"] = BuildStatus.FAILURE + + update_build_status(cache_service, flow_id, BuildStatus.FAILURE) yield str(StreamData(event="error", data={"error": str(exc)})) finally: yield str(StreamData(event="message", data=final_response)) @@ -180,3 +227,20 @@ async def stream_build(flow_id: str): except Exception as exc: logger.error(f"Error streaming build: {exc}") raise HTTPException(status_code=500, detail=str(exc)) + + +def try_running_celery_task(vertex): + # Try running the task in celery + # and set the task_id to the local vertex + # if it fails, run the task locally + try: + from langflow.worker import build_vertex + + task = build_vertex.delay(vertex) + vertex.task_id = task.id + except Exception as exc: + logger.exception(exc) + logger.error("Error running task in celery, running locally") + vertex.task_id = None + vertex.build() + return vertex diff --git 
a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index 4071461fb..d2b39dfd2 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -2,7 +2,7 @@ from datetime import timezone from typing import List from uuid import UUID from langflow.services.database.models.component import Component, ComponentModel -from langflow.services.utils import get_session +from langflow.services.getters import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.exc import IntegrityError diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 221091b7f..864467f9b 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -1,97 +1,103 @@ from http import HTTPStatus from typing import Annotated, Optional, Union +from langflow.services.auth.utils import api_key_security, get_current_active_user + from langflow.services.cache.utils import save_uploaded_file from langflow.services.database.models.flow import Flow from langflow.processing.process import process_graph_cached, process_tweaks -from langflow.services.utils import get_settings_manager -from langflow.utils.logger import logger -from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body - +from langflow.services.database.models.user.user import User +from langflow.services.getters import ( + get_session_service, + get_settings_service, + get_task_service, +) +from loguru import logger +from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status +import sqlalchemy as sa from langflow.interface.custom.custom_component import CustomComponent from langflow.api.v1.schemas import ( ProcessResponse, + TaskResponse, + TaskStatusResponse, UploadFileResponse, CustomComponentCode, ) -from langflow.api.utils import merge_nested_dicts_with_renaming -from 
langflow.interface.types import ( - build_langchain_types_dict, - build_langchain_template_custom_component, - build_langchain_custom_component_list_from_path, -) +from langflow.services.getters import get_session + +try: + from langflow.worker import process_graph_cached_task +except ImportError: + + def process_graph_cached_task(*args, **kwargs): + raise NotImplementedError("Celery is not installed") + -from langflow.services.utils import get_session from sqlmodel import Session + +from langflow.services.task.manager import TaskService + # build router router = APIRouter(tags=["Base"]) -@router.get("/all") -def get_all(): +@router.get("/all", dependencies=[Depends(get_current_active_user)]) +def get_all( + settings_service=Depends(get_settings_service), +): + from langflow.interface.types import get_all_types_dict + logger.debug("Building langchain types dict") - native_components = build_langchain_types_dict() - # custom_components is a list of dicts - # need to merge all the keys into one dict - custom_components_from_file = {} - settings_manager = get_settings_manager() - if settings_manager.settings.COMPONENTS_PATH: - logger.info( - f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}" - ) - - custom_component_dicts = [] - processed_paths = [] - for path in settings_manager.settings.COMPONENTS_PATH: - if str(path) in processed_paths: - continue - custom_component_dict = build_langchain_custom_component_list_from_path( - str(path) - ) - custom_component_dicts.append(custom_component_dict) - processed_paths.append(str(path)) - - logger.info(f"Loading {len(custom_component_dicts)} category(ies)") - for custom_component_dict in custom_component_dicts: - # custom_component_dict is a dict of dicts - if not custom_component_dict: - continue - category = list(custom_component_dict.keys())[0] - logger.info( - f"Loading {len(custom_component_dict[category])} component(s) from category {category}" - ) - logger.debug(custom_component_dict) - 
custom_components_from_file = merge_nested_dicts_with_renaming( - custom_components_from_file, custom_component_dict - ) - - return merge_nested_dicts_with_renaming( - native_components, custom_components_from_file - ) + try: + return get_all_types_dict(settings_service) + except Exception as exc: + raise HTTPException(status_code=500, detail=str(exc)) from exc # For backwards compatibility we will keep the old endpoint -@router.post("/predict/{flow_id}", response_model=ProcessResponse) -@router.post("/process/{flow_id}", response_model=ProcessResponse) +@router.post( + "/predict/{flow_id}", + response_model=ProcessResponse, + dependencies=[Depends(api_key_security)], +) +@router.post( + "/process/{flow_id}", + response_model=ProcessResponse, +) async def process_flow( + session: Annotated[Session, Depends(get_session)], flow_id: str, inputs: Optional[dict] = None, tweaks: Optional[dict] = None, clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821 session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821 - session: Session = Depends(get_session), + task_service: "TaskService" = Depends(get_task_service), + api_key_user: User = Depends(api_key_security), + sync: Annotated[bool, Body(embed=True)] = True, # noqa: F821 ): """ Endpoint to process an input with a given flow_id. 
""" try: - flow = session.get(Flow, flow_id) + if api_key_user is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid API Key", + ) + + # Get the flow that matches the flow_id and belongs to the user + flow = ( + session.query(Flow) + .filter(Flow.id == flow_id) + .filter(Flow.user_id == api_key_user.id) + .first() + ) if flow is None: raise ValueError(f"Flow {flow_id} not found") @@ -103,16 +109,94 @@ async def process_flow( graph_data = process_tweaks(graph_data, tweaks) except Exception as exc: logger.error(f"Error processing tweaks: {exc}") - response, session_id = process_graph_cached( - graph_data, inputs, clear_cache, session_id + if sync: + task_id, result = await task_service.launch_and_await_task( + process_graph_cached_task + if task_service.use_celery + else process_graph_cached, + graph_data, + inputs, + clear_cache, + session_id, + ) + if isinstance(result, dict) and "result" in result: + task_result = result["result"] + session_id = result["session_id"] + elif hasattr(result, "result") and hasattr(result, "session_id"): + task_result = result.result + + session_id = result.session_id + else: + logger.warning( + "This is an experimental feature and may not work as expected." + "Please report any issues to our GitHub repository." 
+ ) + if session_id is None: + # Generate a session ID + session_id = get_session_service().generate_key( + session_id=session_id, data_graph=graph_data + ) + task_id, task = await task_service.launch_task( + process_graph_cached_task + if task_service.use_celery + else process_graph_cached, + graph_data, + inputs, + clear_cache, + session_id, + ) + task_result = task.status + + if task_id: + task_response = TaskResponse(id=task_id, href=f"api/v1/task/{task_id}") + else: + task_response = None + + return ProcessResponse( + result=task_result, + task=task_response, + session_id=session_id, + backend=str(type(task_service.backend)), ) - return ProcessResponse(result=response, session_id=session_id) + except sa.exc.StatementError as exc: + # StatementError('(builtins.ValueError) badly formed hexadecimal UUID string') + if "badly formed hexadecimal UUID string" in str(exc): + # This means the Flow ID is not a valid UUID which means it can't find the flow + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) + ) from exc + except ValueError as exc: + if f"Flow {flow_id} not found" in str(exc): + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=str(exc) + ) from exc + else: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc) + ) from exc except Exception as e: # Log stack trace logger.exception(e) raise HTTPException(status_code=500, detail=str(e)) from e +@router.get("/task/{task_id}", response_model=TaskStatusResponse) +async def get_task_status(task_id: str): + task_service = get_task_service() + task = task_service.get_task(task_id) + result = None + if task.ready(): + result = task.result + if isinstance(result, dict) and "result" in result: + result = result["result"] + elif hasattr(result, "result"): + result = result.result + + if task is None: + raise HTTPException(status_code=404, detail="Task not found") + return TaskStatusResponse(status=task.status, result=result) + + 
@router.post( "/upload/{flow_id}", response_model=UploadFileResponse, @@ -121,7 +205,7 @@ async def process_flow( async def create_upload_file(file: UploadFile, flow_id: str): # Cache file try: - file_path = save_uploaded_file(file.file, folder_name=flow_id) + file_path = save_uploaded_file(file, folder_name=flow_id) return UploadFileResponse( flowId=flow_id, @@ -144,6 +228,10 @@ def get_version(): async def custom_component( raw_code: CustomComponentCode, ): + from langflow.interface.types import ( + build_langchain_template_custom_component, + ) + extractor = CustomComponent(code=raw_code.code) extractor.is_check_valid() diff --git a/src/backend/langflow/api/v1/flows.py b/src/backend/langflow/api/v1/flows.py index 3145ced3c..9953db0db 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/langflow/api/v1/flows.py @@ -1,30 +1,42 @@ from typing import List from uuid import UUID +from fastapi.encoders import jsonable_encoder + from langflow.api.utils import remove_api_keys from langflow.api.v1.schemas import FlowListCreate, FlowListRead +from langflow.services.auth.utils import get_current_active_user from langflow.services.database.models.flow import ( Flow, FlowCreate, FlowRead, FlowUpdate, ) -from langflow.services.utils import get_session -from langflow.services.utils import get_settings_manager -from sqlmodel import Session, select +from langflow.services.database.models.user.user import User +from langflow.services.getters import get_session +from langflow.services.getters import get_settings_service +import orjson +from sqlmodel import Session from fastapi import APIRouter, Depends, HTTPException -from fastapi.encoders import jsonable_encoder from fastapi import File, UploadFile -import json # build router router = APIRouter(prefix="/flows", tags=["Flows"]) @router.post("/", response_model=FlowRead, status_code=201) -def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate): +def create_flow( + *, + session: Session = 
Depends(get_session), + flow: FlowCreate, + current_user: User = Depends(get_current_active_user), +): """Create a new flow.""" + if flow.user_id is None: + flow.user_id = current_user.id + db_flow = Flow.from_orm(flow) + session.add(db_flow) session.commit() session.refresh(db_flow) @@ -32,39 +44,58 @@ def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate): @router.get("/", response_model=list[FlowRead], status_code=200) -def read_flows(*, session: Session = Depends(get_session)): +def read_flows( + *, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_active_user), +): """Read all flows.""" try: - flows = session.exec(select(Flow)).all() + flows = current_user.flows except Exception as e: raise HTTPException(status_code=500, detail=str(e)) from e return [jsonable_encoder(flow) for flow in flows] @router.get("/{flow_id}", response_model=FlowRead, status_code=200) -def read_flow(*, session: Session = Depends(get_session), flow_id: UUID): +def read_flow( + *, + session: Session = Depends(get_session), + flow_id: UUID, + current_user: User = Depends(get_current_active_user), +): """Read a flow.""" - if flow := session.get(Flow, flow_id): - return flow + if user_flow := ( + session.query(Flow) + .filter(Flow.id == flow_id) + .filter(Flow.user_id == current_user.id) + .first() + ): + return user_flow else: raise HTTPException(status_code=404, detail="Flow not found") @router.patch("/{flow_id}", response_model=FlowRead, status_code=200) def update_flow( - *, session: Session = Depends(get_session), flow_id: UUID, flow: FlowUpdate + *, + session: Session = Depends(get_session), + flow_id: UUID, + flow: FlowUpdate, + current_user: User = Depends(get_current_active_user), + settings_service=Depends(get_settings_service), ): """Update a flow.""" - db_flow = session.get(Flow, flow_id) + db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user) if not db_flow: raise HTTPException(status_code=404, 
detail="Flow not found") flow_data = flow.dict(exclude_unset=True) - settings_manager = get_settings_manager() - if settings_manager.settings.REMOVE_API_KEYS: + if settings_service.settings.REMOVE_API_KEYS: flow_data = remove_api_keys(flow_data) for key, value in flow_data.items(): - setattr(db_flow, key, value) + if value is not None: + setattr(db_flow, key, value) session.add(db_flow) session.commit() session.refresh(db_flow) @@ -72,9 +103,14 @@ def update_flow( @router.delete("/{flow_id}", status_code=200) -def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID): +def delete_flow( + *, + session: Session = Depends(get_session), + flow_id: UUID, + current_user: User = Depends(get_current_active_user), +): """Delete a flow.""" - flow = session.get(Flow, flow_id) + flow = read_flow(session=session, flow_id=flow_id, current_user=current_user) if not flow: raise HTTPException(status_code=404, detail="Flow not found") session.delete(flow) @@ -86,10 +122,16 @@ def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID): @router.post("/batch/", response_model=List[FlowRead], status_code=201) -def create_flows(*, session: Session = Depends(get_session), flow_list: FlowListCreate): +def create_flows( + *, + session: Session = Depends(get_session), + flow_list: FlowListCreate, + current_user: User = Depends(get_current_active_user), +): """Create multiple new flows.""" db_flows = [] for flow in flow_list.flows: + flow.user_id = current_user.id db_flow = Flow.from_orm(flow) session.add(db_flow) db_flows.append(db_flow) @@ -101,20 +143,31 @@ def create_flows(*, session: Session = Depends(get_session), flow_list: FlowList @router.post("/upload/", response_model=List[FlowRead], status_code=201) async def upload_file( - *, session: Session = Depends(get_session), file: UploadFile = File(...) 
+ *, + session: Session = Depends(get_session), + file: UploadFile = File(...), + current_user: User = Depends(get_current_active_user), ): """Upload flows from a file.""" contents = await file.read() - data = json.loads(contents) + data = orjson.loads(contents) if "flows" in data: flow_list = FlowListCreate(**data) else: flow_list = FlowListCreate(flows=[FlowCreate(**flow) for flow in data]) - return create_flows(session=session, flow_list=flow_list) + # Now we set the user_id for all flows + for flow in flow_list.flows: + flow.user_id = current_user.id + + return create_flows(session=session, flow_list=flow_list, current_user=current_user) @router.get("/download/", response_model=FlowListRead, status_code=200) -async def download_file(*, session: Session = Depends(get_session)): +async def download_file( + *, + session: Session = Depends(get_session), + current_user: User = Depends(get_current_active_user), +): """Download all flows as a file.""" - flows = read_flows(session=session) + flows = read_flows(session=session, current_user=current_user) return FlowListRead(flows=flows) diff --git a/src/backend/langflow/routers/login.py b/src/backend/langflow/api/v1/login.py similarity index 58% rename from src/backend/langflow/routers/login.py rename to src/backend/langflow/api/v1/login.py index 7d114473d..4dfc723b5 100644 --- a/src/backend/langflow/routers/login.py +++ b/src/backend/langflow/api/v1/login.py @@ -1,20 +1,20 @@ -from uuid import UUID -from sqlalchemy.orm import Session +from sqlmodel import Session from fastapi import APIRouter, Depends, HTTPException, status from fastapi.security import OAuth2PasswordRequestForm -from langflow.services.utils import get_session -from langflow.database.models.token import Token -from langflow.auth.auth import ( +from langflow.services.getters import get_session +from langflow.api.v1.schemas import Token +from langflow.services.auth.utils import ( authenticate_user, create_user_tokens, create_refresh_token, 
create_user_longterm_token, + get_current_active_user, ) -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service -router = APIRouter() +router = APIRouter(tags=["Login"]) @router.post("/login", response_model=Token) @@ -23,7 +23,17 @@ async def login_to_get_access_token( db: Session = Depends(get_session), # _: Session = Depends(get_current_active_user) ): - if user := authenticate_user(form_data.username, form_data.password, db): + try: + user = authenticate_user(form_data.username, form_data.password, db) + except Exception as exc: + if isinstance(exc, HTTPException): + raise exc + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=str(exc), + ) from exc + + if user: return create_user_tokens(user_id=user.id, db=db, update_last_login=True) else: raise HTTPException( @@ -34,12 +44,11 @@ async def login_to_get_access_token( @router.get("/auto_login") -async def auto_login(db: Session = Depends(get_session)): - settings_manager = get_settings_manager() - - if settings_manager.settings.AUTO_LOGIN: - user_id = UUID("3fa85f64-5717-4562-b3fc-2c963f66afa6") - return create_user_longterm_token(user_id, db) +async def auto_login( + db: Session = Depends(get_session), settings_service=Depends(get_settings_service) +): + if settings_service.auth_settings.AUTO_LOGIN: + return create_user_longterm_token(db) raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, @@ -51,7 +60,9 @@ async def auto_login(db: Session = Depends(get_session)): @router.post("/refresh") -async def refresh_token(token: str): +async def refresh_token( + token: str, current_user: Session = Depends(get_current_active_user) +): if token: return create_refresh_token(token) else: diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index d38c32d26..4e05b5038 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -1,9 +1,18 @@ 
from enum import Enum
 from pathlib import Path
 from typing import Any, Dict, List, Optional, Union
+from uuid import UUID
+from langflow.services.database.models.api_key.api_key import ApiKeyRead
 from langflow.services.database.models.flow import FlowCreate, FlowRead
-from pydantic import BaseModel, Field, field_validator
-import json
+from langflow.services.database.models.user import UserRead
+from langflow.services.database.models.base import orjson_dumps
+
+from pydantic import BaseModel, Field, validator
 
 
 class BuildStatus(Enum):
@@ -43,11 +52,30 @@ class UpdateTemplateRequest(BaseModel):
     template: dict
 
 
+class TaskResponse(BaseModel):
+    """Task response schema."""
+
+    id: Optional[str] = Field(None)
+    href: Optional[str] = Field(None)
+
+
 class ProcessResponse(BaseModel):
     """Process response schema."""
 
-    result: dict
+    result: Any
+    task: Optional[TaskResponse] = None
     session_id: Optional[str] = None
+    backend: Optional[str] = None
+
+
+# TaskStatusResponse(
+#     status=task.status, result=task.result if task.ready() else None
+# )
+class TaskStatusResponse(BaseModel):
+    """Task status response schema."""
+
+    status: str
+    result: Optional[Any] = None
 
 
 class ChatMessage(BaseModel):
@@ -118,7 +146,9 @@ class StreamData(BaseModel):
     data: dict
 
     def __str__(self) -> str:
-        return f"event: {self.event}\ndata: {json.dumps(self.data)}\n\n"
+        return (
+            f"event: {self.event}\ndata: {orjson_dumps(self.data, indent_2=False)}\n\n"
+        )
 
 
 class CustomComponentCode(BaseModel):
@@ -136,3 +166,32 @@ class ComponentListCreate(BaseModel):
 
 class ComponentListRead(BaseModel):
     flows: List[FlowRead]
+
+
+class UsersResponse(BaseModel):
+    total_count: int
+    users: List[UserRead]
+
+
+class ApiKeyResponse(BaseModel):
+    id: str
+    api_key: str
+    name: str
+    created_at: str
+    last_used_at: str
+
+
+class ApiKeysResponse(BaseModel):
+    total_count: int
+    user_id: UUID
+    api_keys: List[ApiKeyRead]
+
+
+class CreateApiKeyRequest(BaseModel):
+    name: str
+
+
+class Token(BaseModel): + access_token: str + refresh_token: str + token_type: str diff --git a/src/backend/langflow/api/v1/users.py b/src/backend/langflow/api/v1/users.py new file mode 100644 index 000000000..73c7346d9 --- /dev/null +++ b/src/backend/langflow/api/v1/users.py @@ -0,0 +1,169 @@ +from uuid import UUID +from langflow.api.v1.schemas import UsersResponse +from langflow.services.database.models.user import ( + User, + UserCreate, + UserRead, + UserUpdate, +) + +from sqlalchemy import func +from sqlalchemy.exc import IntegrityError + +from sqlmodel import Session, select +from fastapi import APIRouter, Depends, HTTPException + +from langflow.services.getters import get_session, get_settings_service +from langflow.services.auth.utils import ( + get_current_active_superuser, + get_current_active_user, + get_password_hash, + verify_password, +) +from langflow.services.database.models.user.crud import ( + get_user_by_id, + update_user, +) + +router = APIRouter(tags=["Users"], prefix="/users") + + +@router.post("/", response_model=UserRead, status_code=201) +def add_user( + user: UserCreate, + session: Session = Depends(get_session), + settings_service=Depends(get_settings_service), +) -> User: + """ + Add a new user to the database. + """ + new_user = User.from_orm(user) + try: + new_user.password = get_password_hash(user.password) + new_user.is_active = settings_service.auth_settings.NEW_USER_IS_ACTIVE + session.add(new_user) + session.commit() + session.refresh(new_user) + except IntegrityError as e: + session.rollback() + raise HTTPException( + status_code=400, detail="This username is unavailable." + ) from e + + return new_user + + +@router.get("/whoami", response_model=UserRead) +def read_current_user( + current_user: User = Depends(get_current_active_user), +) -> User: + """ + Retrieve the current user's data. 
+ """ + return current_user + + +@router.get("/", response_model=UsersResponse) +def read_all_users( + skip: int = 0, + limit: int = 10, + _: Session = Depends(get_current_active_superuser), + session: Session = Depends(get_session), +) -> UsersResponse: + """ + Retrieve a list of users from the database with pagination. + """ + query = select(User).offset(skip).limit(limit) + users = session.execute(query).fetchall() + + count_query = select(func.count()).select_from(User) # type: ignore + total_count = session.execute(count_query).scalar() + + return UsersResponse( + total_count=total_count, # type: ignore + users=[UserRead(**dict(user.User)) for user in users], + ) + + +@router.patch("/{user_id}", response_model=UserRead) +def patch_user( + user_id: UUID, + user_update: UserUpdate, + user: User = Depends(get_current_active_user), + session: Session = Depends(get_session), +) -> User: + """ + Update an existing user's data. + """ + if not user.is_superuser and user.id != user_id: + raise HTTPException( + status_code=403, detail="You don't have the permission to update this user" + ) + if user_update.password: + if not user.is_superuser: + raise HTTPException( + status_code=400, detail="You can't change your password here" + ) + user_update.password = get_password_hash(user_update.password) + + if user_db := get_user_by_id(session, user_id): + return update_user(user_db, user_update, session) + else: + raise HTTPException(status_code=404, detail="User not found") + + +@router.patch("/{user_id}/reset-password", response_model=UserRead) +def reset_password( + user_id: UUID, + user_update: UserUpdate, + user: User = Depends(get_current_active_user), + session: Session = Depends(get_session), +) -> User: + """ + Reset a user's password. 
+ """ + if user_id != user.id: + raise HTTPException( + status_code=400, detail="You can't change another user's password" + ) + + if not user: + raise HTTPException(status_code=404, detail="User not found") + if verify_password(user_update.password, user.password): + raise HTTPException( + status_code=400, detail="You can't use your current password" + ) + new_password = get_password_hash(user_update.password) + user.password = new_password + session.commit() + session.refresh(user) + + return user + + +@router.delete("/{user_id}", response_model=dict) +def delete_user( + user_id: UUID, + current_user: User = Depends(get_current_active_superuser), + session: Session = Depends(get_session), +) -> dict: + """ + Delete a user from the database. + """ + if current_user.id == user_id: + raise HTTPException( + status_code=400, detail="You can't delete your own user account" + ) + elif not current_user.is_superuser: + raise HTTPException( + status_code=403, detail="You don't have the permission to delete this user" + ) + + user_db = session.query(User).filter(User.id == user_id).first() + if not user_db: + raise HTTPException(status_code=404, detail="User not found") + + session.delete(user_db) + session.commit() + + return {"detail": "User deleted"} diff --git a/src/backend/langflow/api/v1/validate.py b/src/backend/langflow/api/v1/validate.py index 2a5bdd673..65fb66bd2 100644 --- a/src/backend/langflow/api/v1/validate.py +++ b/src/backend/langflow/api/v1/validate.py @@ -8,7 +8,7 @@ from langflow.api.v1.base import ( validate_prompt, ) from langflow.template.field.base import TemplateField -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.validate import validate_code # build router @@ -31,7 +31,12 @@ def post_validate_code(code: Code): def post_validate_prompt(prompt_request: ValidatePromptRequest): try: input_variables = validate_prompt(prompt_request.template) - + # Check if frontend_node is None before proceeding to avoid 
attempting to update a non-existent node. + if prompt_request.frontend_node is None: + return PromptValidationResponse( + input_variables=input_variables, + frontend_node=None, + ) old_custom_fields = get_old_custom_fields(prompt_request) add_new_variables_to_template(input_variables, prompt_request) @@ -53,6 +58,16 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest): def get_old_custom_fields(prompt_request): try: + if ( + len(prompt_request.frontend_node.custom_fields) == 1 + and prompt_request.name == "" + ): + # If there is only one custom field and the name is empty string + # then we are dealing with the first prompt request after the node was created + prompt_request.name = list( + prompt_request.frontend_node.custom_fields.keys() + )[0] + old_custom_fields = prompt_request.frontend_node.custom_fields[ prompt_request.name ].copy() diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py deleted file mode 100644 index b9e8dba3a..000000000 --- a/src/backend/langflow/auth/auth.py +++ /dev/null @@ -1,177 +0,0 @@ -from uuid import UUID -from typing import Annotated -from jose import JWTError, jwt -from sqlalchemy.orm import Session -from passlib.context import CryptContext -from fastapi.security import OAuth2PasswordBearer -from fastapi import Depends, HTTPException, status -from datetime import datetime, timedelta, timezone - -from langflow.services.utils import get_settings_manager - -from langflow.services.utils import get_session -from langflow.database.models.user import ( - User, - get_user_by_id, - get_user_by_username, - update_user_last_login_at, -) - - -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login") - - -async def get_current_user( - token: Annotated[str, Depends(oauth2_scheme)], db: Session = Depends(get_session) -) -> User: - settings_manager = get_settings_manager() - - credentials_exception = HTTPException( - 
status_code=status.HTTP_401_UNAUTHORIZED, - detail="Could not validate credentials", - headers={"WWW-Authenticate": "Bearer"}, - ) - - try: - payload = jwt.decode( - token, - settings_manager.settings.SECRET_KEY, - algorithms=[settings_manager.settings.ALGORITHM], - ) - user_id: UUID = payload.get("sub") # type: ignore - token_type: str = payload.get("type") # type: ignore - - if user_id is None or token_type: - raise credentials_exception - except JWTError as e: - raise credentials_exception from e - - user = get_user_by_id(db, user_id) # type: ignore - if user is None: - raise credentials_exception - return user - - -async def get_current_active_user( - current_user: Annotated[User, Depends(get_current_user)] -): - if not current_user.is_active: - raise HTTPException(status_code=400, detail="Inactive user") - return current_user - - -def verify_password(plain_password, hashed_password): - return pwd_context.verify(plain_password, hashed_password) - - -def get_password_hash(password): - return pwd_context.hash(password) - - -def create_token(data: dict, expires_delta: timedelta): - settings_manager = get_settings_manager() - - to_encode = data.copy() - expire = datetime.now(timezone.utc) + expires_delta - to_encode["exp"] = expire - - return jwt.encode( - to_encode, - settings_manager.settings.SECRET_KEY, - algorithm=settings_manager.settings.ALGORITHM, - ) - - -def create_user_longterm_token( - user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False -) -> dict: - access_token_expires_longterm = timedelta(days=365) - access_token = create_token( - data={"sub": str(user_id)}, - expires_delta=access_token_expires_longterm, - ) - - # Update: last_login_at - if update_last_login: - update_user_last_login_at(user_id, db) - - return { - "access_token": access_token, - "refresh_token": None, - "token_type": "bearer", - } - - -def create_user_tokens( - user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False -) -> dict: 
- settings_manager = get_settings_manager() - - access_token_expires = timedelta( - minutes=settings_manager.settings.ACCESS_TOKEN_EXPIRE_MINUTES - ) - access_token = create_token( - data={"sub": str(user_id)}, - expires_delta=access_token_expires, - ) - - refresh_token_expires = timedelta( - minutes=settings_manager.settings.REFRESH_TOKEN_EXPIRE_MINUTES - ) - refresh_token = create_token( - data={"sub": str(user_id), "type": "rf"}, - expires_delta=refresh_token_expires, - ) - - # Update: last_login_at - if update_last_login: - update_user_last_login_at(user_id, db) - - return { - "access_token": access_token, - "refresh_token": refresh_token, - "token_type": "bearer", - } - - -def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)): - settings_manager = get_settings_manager() - - try: - payload = jwt.decode( - refresh_token, - settings_manager.settings.SECRET_KEY, - algorithms=[settings_manager.settings.ALGORITHM], - ) - user_id: UUID = payload.get("sub") # type: ignore - token_type: str = payload.get("type") # type: ignore - - if user_id is None or token_type is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token" - ) - - return create_user_tokens(user_id, db) - - except JWTError as e: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="Invalid refresh token", - ) from e - - -def authenticate_user( - username: str, password: str, db: Session = Depends(get_session) -) -> User | None: - user = get_user_by_username(db, username) - - if not user: - return None - - if not user.is_active: - if not user.last_login_at: - raise HTTPException(status_code=400, detail="Waiting for approval") - raise HTTPException(status_code=400, detail="Inactive user") - - return user if verify_password(password, user.password) else None diff --git a/src/backend/langflow/components/agents/OpenAIConversationalAgent.py b/src/backend/langflow/components/agents/OpenAIConversationalAgent.py index 
e2b876978..ff1a993b1 100644 --- a/src/backend/langflow/components/agents/OpenAIConversationalAgent.py +++ b/src/backend/langflow/components/agents/OpenAIConversationalAgent.py @@ -42,8 +42,8 @@ class ConversationalAgent(CustomComponent): self, model_name: str, openai_api_key: str, - openai_api_base: str, tools: Tool, + openai_api_base: Optional[str] = None, memory: Optional[BaseMemory] = None, system_message: Optional[SystemMessagePromptTemplate] = None, max_token_limit: int = 2000, diff --git a/src/backend/langflow/components/chains/PromptRunner.py b/src/backend/langflow/components/chains/PromptRunner.py index 141941c38..db9283b07 100644 --- a/src/backend/langflow/components/chains/PromptRunner.py +++ b/src/backend/langflow/components/chains/PromptRunner.py @@ -16,17 +16,14 @@ class PromptRunner(CustomComponent): "info": "Make sure the prompt has all variables filled.", }, "code": {"show": False}, - "inputs": {"field_type": "code"}, } def build( - self, - llm: BaseLLM, - prompt: PromptTemplate, + self, llm: BaseLLM, prompt: PromptTemplate, inputs: dict = {} ) -> Document: chain = prompt | llm # The input is an empty dict because the prompt is already filled - result = chain.invoke({}) + result = chain.invoke(input=inputs) if hasattr(result, "content"): result = result.content self.repr_value = result diff --git a/src/backend/langflow/components/llms/HuggingFaceEndpoints.py b/src/backend/langflow/components/llms/HuggingFaceEndpoints.py new file mode 100644 index 000000000..ea2b4f20b --- /dev/null +++ b/src/backend/langflow/components/llms/HuggingFaceEndpoints.py @@ -0,0 +1,42 @@ +from typing import Optional +from langflow import CustomComponent +from langchain.llms import HuggingFaceEndpoint +from langchain.llms.base import BaseLLM + + +class HuggingFaceEndpointsComponent(CustomComponent): + display_name: str = "Hugging Face Inference API" + description: str = "LLM model from Hugging Face Inference API." 
+ + def build_config(self): + return { + "endpoint_url": {"display_name": "Endpoint URL", "password": True}, + "task": { + "display_name": "Task", + "type": "select", + "options": ["text2text-generation", "text-generation", "summarization"], + }, + "huggingfacehub_api_token": {"display_name": "API token", "password": True}, + "model_kwargs": { + "display_name": "Model Keyword Arguments", + "field_type": "code", + }, + "code": {"show": False}, + } + + def build( + self, + endpoint_url: str, + task="text2text-generation", + huggingfacehub_api_token: Optional[str] = None, + model_kwargs: Optional[dict] = None, + ) -> BaseLLM: + try: + output = HuggingFaceEndpoint( + endpoint_url=endpoint_url, + task=task, + huggingfacehub_api_token=huggingfacehub_api_token, + ) + except Exception as e: + raise ValueError("Could not connect to HuggingFace Endpoints API.") from e + return output diff --git a/src/backend/langflow/auth/__init__.py b/src/backend/langflow/components/llms/__init__.py similarity index 100% rename from src/backend/langflow/auth/__init__.py rename to src/backend/langflow/components/llms/__init__.py diff --git a/src/backend/langflow/components/retrievers/MetalRetriever.py b/src/backend/langflow/components/retrievers/MetalRetriever.py new file mode 100644 index 000000000..b105cd24f --- /dev/null +++ b/src/backend/langflow/components/retrievers/MetalRetriever.py @@ -0,0 +1,28 @@ +from typing import Optional +from langflow import CustomComponent +from langchain.retrievers import MetalRetriever +from langchain.schema import BaseRetriever +from metal_sdk.metal import Metal # type: ignore + + +class MetalRetrieverComponent(CustomComponent): + display_name: str = "Metal Retriever" + description: str = "Retriever that uses the Metal API." 
+ + def build_config(self): + return { + "api_key": {"display_name": "API Key", "password": True}, + "client_id": {"display_name": "Client ID", "password": True}, + "index_id": {"display_name": "Index ID"}, + "params": {"display_name": "Parameters"}, + "code": {"show": False}, + } + + def build( + self, api_key: str, client_id: str, index_id: str, params: Optional[dict] = None + ) -> BaseRetriever: + try: + metal = Metal(api_key=api_key, client_id=client_id, index_id=index_id) + except Exception as e: + raise ValueError("Could not connect to Metal API.") from e + return MetalRetriever(client=metal, params=params or {}) diff --git a/src/backend/langflow/routers/__init__.py b/src/backend/langflow/components/retrievers/__init__.py similarity index 100% rename from src/backend/langflow/routers/__init__.py rename to src/backend/langflow/components/retrievers/__init__.py diff --git a/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py b/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py new file mode 100644 index 000000000..636eb427e --- /dev/null +++ b/src/backend/langflow/components/textsplitters/LanguageRecursiveTextSplitter.py @@ -0,0 +1,80 @@ +from typing import Optional +from langflow import CustomComponent +from langchain.text_splitter import Language +from langchain.schema import Document + + +class LanguageRecursiveTextSplitterComponent(CustomComponent): + display_name: str = "Language Recursive Text Splitter" + description: str = "Split text into chunks of a specified length based on language." 
+ documentation: str = "https://docs.langflow.org/components/text-splitters#languagerecursivetextsplitter" + + def build_config(self): + options = [x.value for x in Language] + return { + "documents": { + "display_name": "Documents", + "info": "The documents to split.", + }, + "separator_type": { + "display_name": "Separator Type", + "info": "The type of separator to use.", + "field_type": "str", + "options": options, + "value": "Python", + }, + "separators": { + "display_name": "Separators", + "info": "The characters to split on.", + "is_list": True, + }, + "chunk_size": { + "display_name": "Chunk Size", + "info": "The maximum length of each chunk.", + "field_type": "int", + "value": 1000, + }, + "chunk_overlap": { + "display_name": "Chunk Overlap", + "info": "The amount of overlap between chunks.", + "field_type": "int", + "value": 200, + }, + "code": {"show": False}, + } + + def build( + self, + documents: list[Document], + chunk_size: Optional[int] = 1000, + chunk_overlap: Optional[int] = 200, + separator_type: Optional[str] = "Python", + ) -> list[Document]: + """ + Split text into chunks of a specified length. + + Args: + separators (list[str]): The characters to split on. + chunk_size (int): The maximum length of each chunk. + chunk_overlap (int): The amount of overlap between chunks. + length_function (function): The function to use to calculate the length of the text. + + Returns: + list[str]: The chunks of text. 
+ """ + from langchain.text_splitter import RecursiveCharacterTextSplitter + + # Make sure chunk_size and chunk_overlap are ints + if isinstance(chunk_size, str): + chunk_size = int(chunk_size) + if isinstance(chunk_overlap, str): + chunk_overlap = int(chunk_overlap) + + splitter = RecursiveCharacterTextSplitter.from_language( + language=Language(separator_type), + chunk_size=chunk_size, + chunk_overlap=chunk_overlap, + ) + + docs = splitter.split_documents(documents) + return docs diff --git a/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py b/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py new file mode 100644 index 000000000..751d2518c --- /dev/null +++ b/src/backend/langflow/components/textsplitters/RecursiveCharacterTextSplitter.py @@ -0,0 +1,79 @@ +from typing import Optional +from langflow import CustomComponent +from langchain.schema import Document +from langflow.utils.util import build_loader_repr_from_documents + + +class RecursiveCharacterTextSplitterComponent(CustomComponent): + display_name: str = "Recursive Character Text Splitter" + description: str = "Split text into chunks of a specified length." 
+ documentation: str = "https://docs.langflow.org/components/text-splitters#recursivecharactertextsplitter" + + def build_config(self): + return { + "documents": { + "display_name": "Documents", + "info": "The documents to split.", + }, + "separators": { + "display_name": "Separators", + "info": 'The characters to split on.\nIf left empty defaults to ["\\n\\n", "\\n", " ", ""].', + "is_list": True, + }, + "chunk_size": { + "display_name": "Chunk Size", + "info": "The maximum length of each chunk.", + "field_type": "int", + "value": 1000, + }, + "chunk_overlap": { + "display_name": "Chunk Overlap", + "info": "The amount of overlap between chunks.", + "field_type": "int", + "value": 200, + }, + "code": {"show": False}, + } + + def build( + self, + documents: list[Document], + separators: Optional[list[str]] = None, + chunk_size: Optional[int] = 1000, + chunk_overlap: Optional[int] = 200, + ) -> list[Document]: + """ + Split text into chunks of a specified length. + + Args: + separators (list[str]): The characters to split on. + chunk_size (int): The maximum length of each chunk. + chunk_overlap (int): The amount of overlap between chunks. + length_function (function): The function to use to calculate the length of the text. + + Returns: + list[str]: The chunks of text. 
+ """ + from langchain.text_splitter import RecursiveCharacterTextSplitter + + if separators == "": + separators = None + elif separators: + # check if the separators list has escaped characters + # if there are escaped characters, unescape them + separators = [x.encode().decode("unicode-escape") for x in separators] + + # Make sure chunk_size and chunk_overlap are ints + if isinstance(chunk_size, str): + chunk_size = int(chunk_size) + if isinstance(chunk_overlap, str): + chunk_overlap = int(chunk_overlap) + splitter = RecursiveCharacterTextSplitter( + separators=separators, + chunk_size=chunk_size, + chunk_overlap=chunk_overlap, + ) + + docs = splitter.split_documents(documents) + self.repr_value = build_loader_repr_from_documents(docs) + return docs diff --git a/src/backend/langflow/components/textsplitters/__init__.py b/src/backend/langflow/components/textsplitters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/components/utilities/GetRequest.py b/src/backend/langflow/components/utilities/GetRequest.py new file mode 100644 index 000000000..d5df32cca --- /dev/null +++ b/src/backend/langflow/components/utilities/GetRequest.py @@ -0,0 +1,75 @@ +from langflow import CustomComponent +from langchain.schema import Document +from langflow.services.database.models.base import orjson_dumps +import requests +from typing import Optional + + +class GetRequest(CustomComponent): + display_name: str = "GET Request" + description: str = "Make a GET request to the given URL." 
+ output_types: list[str] = ["Document"] + documentation: str = "https://docs.langflow.org/components/utilities#get-request" + beta = True + field_config = { + "url": { + "display_name": "URL", + "info": "The URL to make the request to", + "is_list": True, + }, + "headers": { + "display_name": "Headers", + "info": "The headers to send with the request.", + }, + "code": {"show": False}, + "timeout": { + "display_name": "Timeout", + "field_type": "int", + "info": "The timeout to use for the request.", + "value": 5, + }, + } + + def get_document( + self, session: requests.Session, url: str, headers: Optional[dict], timeout: int + ) -> Document: + try: + response = session.get(url, headers=headers, timeout=int(timeout)) + try: + response_json = response.json() + result = orjson_dumps(response_json, indent_2=False) + except Exception: + result = response.text + self.repr_value = result + return Document( + page_content=result, + metadata={ + "source": url, + "headers": headers, + "status_code": response.status_code, + }, + ) + except requests.Timeout: + return Document( + page_content="Request Timed Out", + metadata={"source": url, "headers": headers, "status_code": 408}, + ) + except Exception as exc: + return Document( + page_content=str(exc), + metadata={"source": url, "headers": headers, "status_code": 500}, + ) + + def build( + self, + url: str, + headers: Optional[dict] = None, + timeout: int = 5, + ) -> list[Document]: + if headers is None: + headers = {} + urls = url if isinstance(url, list) else [url] + with requests.Session() as session: + documents = [self.get_document(session, u, headers, timeout) for u in urls] + self.repr_value = documents + return documents diff --git a/src/backend/langflow/components/utilities/JSONDocumentBuilder.py b/src/backend/langflow/components/utilities/JSONDocumentBuilder.py new file mode 100644 index 000000000..26a2afd94 --- /dev/null +++ b/src/backend/langflow/components/utilities/JSONDocumentBuilder.py @@ -0,0 +1,55 @@ +### 
JSON Document Builder + +# Build a Document containing a JSON object using a key and another Document page content. + +# **Params** + +# - **Key:** The key to use for the JSON object. +# - **Document:** The Document page to use for the JSON object. + +# **Output** + +# - **Document:** The Document containing the JSON object. + +from langflow import CustomComponent +from langchain.schema import Document +from langflow.services.database.models.base import orjson_dumps + + +class JSONDocumentBuilder(CustomComponent): + display_name: str = "JSON Document Builder" + description: str = "Build a Document containing a JSON object using a key and another Document page content." + output_types: list[str] = ["Document"] + beta = True + documentation: str = ( + "https://docs.langflow.org/components/utilities#json-document-builder" + ) + + field_config = { + "key": {"display_name": "Key"}, + "document": {"display_name": "Document"}, + } + + def build( + self, + key: str, + document: Document, + ) -> Document: + documents = None + if isinstance(document, list): + documents = [ + Document( + page_content=orjson_dumps({key: doc.page_content}, indent_2=False) + ) + for doc in document + ] + elif isinstance(document, Document): + documents = Document( + page_content=orjson_dumps({key: document.page_content}, indent_2=False) + ) + else: + raise TypeError( + f"Expected Document or list of Documents, got {type(document)}" + ) + self.repr_value = documents + return documents diff --git a/src/backend/langflow/components/utilities/PostRequest.py b/src/backend/langflow/components/utilities/PostRequest.py new file mode 100644 index 000000000..6857f4866 --- /dev/null +++ b/src/backend/langflow/components/utilities/PostRequest.py @@ -0,0 +1,80 @@ +from langflow import CustomComponent +from langchain.schema import Document +from langflow.services.database.models.base import orjson_dumps +import requests +from typing import Optional + + +class PostRequest(CustomComponent): + display_name: str = 
"POST Request" + description: str = "Make a POST request to the given URL." + output_types: list[str] = ["Document"] + documentation: str = "https://docs.langflow.org/components/utilities#post-request" + beta = True + field_config = { + "url": {"display_name": "URL", "info": "The URL to make the request to."}, + "headers": { + "display_name": "Headers", + "info": "The headers to send with the request.", + }, + "code": {"show": False}, + "document": {"display_name": "Document"}, + } + + def post_document( + self, + session: requests.Session, + document: Document, + url: str, + headers: Optional[dict] = None, + ) -> Document: + try: + response = session.post(url, headers=headers, data=document.page_content) + try: + response_json = response.json() + result = orjson_dumps(response_json, indent_2=False) + except Exception: + result = response.text + self.repr_value = result + return Document( + page_content=result, + metadata={ + "source": url, + "headers": headers, + "status_code": response, + }, + ) + except Exception as exc: + return Document( + page_content=str(exc), + metadata={ + "source": url, + "headers": headers, + "status_code": 500, + }, + ) + + def build( + self, + document: Document, + url: str, + headers: Optional[dict] = None, + ) -> list[Document]: + if headers is None: + headers = {} + + if not isinstance(document, list) and isinstance(document, Document): + documents: list[Document] = [document] + elif isinstance(document, list) and all( + isinstance(doc, Document) for doc in document + ): + documents = document + else: + raise ValueError("document must be a Document or a list of Documents") + + with requests.Session() as session: + documents = [ + self.post_document(session, doc, url, headers) for doc in documents + ] + self.repr_value = documents + return documents diff --git a/src/backend/langflow/components/utilities/UpdateRequest.py b/src/backend/langflow/components/utilities/UpdateRequest.py new file mode 100644 index 000000000..d18c94a56 --- 
/dev/null +++ b/src/backend/langflow/components/utilities/UpdateRequest.py @@ -0,0 +1,94 @@ +from typing import List, Optional +import requests +from langflow import CustomComponent +from langchain.schema import Document +from langflow.services.database.models.base import orjson_dumps + + +class UpdateRequest(CustomComponent): + display_name: str = "Update Request" + description: str = "Make a PATCH request to the given URL." + output_types: list[str] = ["Document"] + documentation: str = "https://docs.langflow.org/components/utilities#update-request" + beta = True + field_config = { + "url": {"display_name": "URL", "info": "The URL to make the request to."}, + "headers": { + "display_name": "Headers", + "field_type": "NestedDict", + "info": "The headers to send with the request.", + }, + "code": {"show": False}, + "document": {"display_name": "Document"}, + "method": { + "display_name": "Method", + "field_type": "str", + "info": "The HTTP method to use.", + "options": ["PATCH", "PUT"], + "value": "PATCH", + }, + } + + def update_document( + self, + session: requests.Session, + document: Document, + url: str, + headers: Optional[dict] = None, + method: str = "PATCH", + ) -> Document: + try: + if method == "PATCH": + response = session.patch( + url, headers=headers, data=document.page_content + ) + elif method == "PUT": + response = session.put(url, headers=headers, data=document.page_content) + else: + raise ValueError(f"Unsupported method: {method}") + try: + response_json = response.json() + result = orjson_dumps(response_json, indent_2=False) + except Exception: + result = response.text + self.repr_value = result + return Document( + page_content=result, + metadata={ + "source": url, + "headers": headers, + "status_code": response.status_code, + }, + ) + except Exception as exc: + return Document( + page_content=str(exc), + metadata={"source": url, "headers": headers, "status_code": 500}, + ) + + def build( + self, + method: str, + document: Document, + url: 
str, + headers: Optional[dict] = None, + ) -> List[Document]: + if headers is None: + headers = {} + + if not isinstance(document, list) and isinstance(document, Document): + documents: list[Document] = [document] + elif isinstance(document, list) and all( + isinstance(doc, Document) for doc in document + ): + documents = document + else: + raise ValueError("document must be a Document or a list of Documents") + + with requests.Session() as session: + documents = [ + self.update_document(session, doc, url, headers, method) + for doc in documents + ] + self.repr_value = documents + return documents diff --git a/src/backend/langflow/components/vectorstores/Chroma.py b/src/backend/langflow/components/vectorstores/Chroma.py new file mode 100644 index 000000000..3cfd4771e --- /dev/null +++ b/src/backend/langflow/components/vectorstores/Chroma.py @@ -0,0 +1,109 @@ +from typing import Optional, Union +from langflow import CustomComponent + +from langchain.vectorstores import Chroma +from langchain.schema import Document +from langchain.vectorstores.base import VectorStore +from langchain.schema import BaseRetriever +from langchain.embeddings.base import Embeddings +import chromadb # type: ignore + + +class ChromaComponent(CustomComponent): + """ + A custom component for implementing a Vector Store using Chroma. + """ + + display_name: str = "Chroma (Custom Component)" + description: str = "Implementation of Vector Store using Chroma" + documentation = "https://python.langchain.com/docs/integrations/vectorstores/chroma" + beta = True + + def build_config(self): + """ + Builds the configuration for the component. + + Returns: + - dict: A dictionary containing the configuration options for the component. 
+ """ + return { + "collection_name": {"display_name": "Collection Name", "value": "langflow"}, + "persist": {"display_name": "Persist"}, + "persist_directory": {"display_name": "Persist Directory"}, + "code": {"show": False, "display_name": "Code"}, + "documents": {"display_name": "Documents", "is_list": True}, + "embedding": {"display_name": "Embedding"}, + "chroma_server_cors_allow_origins": { + "display_name": "Server CORS Allow Origins", + "advanced": True, + }, + "chroma_server_host": {"display_name": "Server Host", "advanced": True}, + "chroma_server_port": {"display_name": "Server Port", "advanced": True}, + "chroma_server_grpc_port": { + "display_name": "Server gRPC Port", + "advanced": True, + }, + "chroma_server_ssl_enabled": { + "display_name": "Server SSL Enabled", + "advanced": True, + }, + } + + def build( + self, + collection_name: str, + persist: bool, + chroma_server_ssl_enabled: bool, + persist_directory: Optional[str] = None, + embedding: Optional[Embeddings] = None, + documents: Optional[Document] = None, + chroma_server_cors_allow_origins: Optional[str] = None, + chroma_server_host: Optional[str] = None, + chroma_server_port: Optional[int] = None, + chroma_server_grpc_port: Optional[int] = None, + ) -> Union[VectorStore, BaseRetriever]: + """ + Builds the Vector Store or BaseRetriever object. + + Args: + - collection_name (str): The name of the collection. + - persist_directory (Optional[str]): The directory to persist the Vector Store to. + - chroma_server_ssl_enabled (bool): Whether to enable SSL for the Chroma server. + - persist (bool): Whether to persist the Vector Store or not. + - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store. + - documents (Optional[Document]): The documents to use for the Vector Store. + - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server. + - chroma_server_host (Optional[str]): The host for the Chroma server. 
+ - chroma_server_port (Optional[int]): The port for the Chroma server. + - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server. + + Returns: + - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object. + """ + + # Chroma settings + chroma_settings = None + + if chroma_server_host is not None: + chroma_settings = chromadb.config.Settings( + chroma_server_cors_allow_origins=chroma_server_cors_allow_origins + or None, + chroma_server_host=chroma_server_host, + chroma_server_port=chroma_server_port or None, + chroma_server_grpc_port=chroma_server_grpc_port or None, + chroma_server_ssl_enabled=chroma_server_ssl_enabled, + ) + + # If documents, then we need to create a Chroma instance using .from_documents + if documents is not None and embedding is not None: + return Chroma.from_documents( + documents=documents, # type: ignore + persist_directory=persist_directory if persist else None, + collection_name=collection_name, + embedding=embedding, + client_settings=chroma_settings, + ) + + return Chroma( + persist_directory=persist_directory, client_settings=chroma_settings + ) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index d25893c25..48f7b1da1 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -171,8 +171,6 @@ prompts: textsplitters: CharacterTextSplitter: documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter" - RecursiveCharacterTextSplitter: - documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter" toolkits: OpenAPIToolkit: documentation: "" diff --git a/src/backend/langflow/core/__init__.py b/src/backend/langflow/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/core/celery_app.py b/src/backend/langflow/core/celery_app.py new file mode 100644 
index 000000000..ef3fc6545 --- /dev/null +++ b/src/backend/langflow/core/celery_app.py @@ -0,0 +1,11 @@ +from celery import Celery # type: ignore + + +def make_celery(app_name: str, config: str) -> Celery: + celery_app = Celery(app_name) + celery_app.config_from_object(config) + celery_app.conf.task_routes = {"langflow.worker.tasks.*": {"queue": "langflow"}} + return celery_app + + +celery_app = make_celery("langflow", "langflow.core.celeryconfig") diff --git a/src/backend/langflow/core/celeryconfig.py b/src/backend/langflow/core/celeryconfig.py new file mode 100644 index 000000000..35d51bba0 --- /dev/null +++ b/src/backend/langflow/core/celeryconfig.py @@ -0,0 +1,14 @@ +# celeryconfig.py +import os + +langflow_redis_host = os.environ.get("LANGFLOW_REDIS_HOST") +langflow_redis_port = os.environ.get("LANGFLOW_REDIS_PORT") +if "BROKER_URL" in os.environ and "RESULT_BACKEND" in os.environ: + # RabbitMQ + broker_url = os.environ.get("BROKER_URL", "amqp://localhost") + result_backend = os.environ.get("RESULT_BACKEND", "redis://localhost:6379/0") +elif langflow_redis_host and langflow_redis_port: + broker_url = f"redis://{langflow_redis_host}:{langflow_redis_port}/0" + result_backend = f"redis://{langflow_redis_host}:{langflow_redis_port}/0" +# tasks should be json or pickle +accept_content = ["json", "pickle"] diff --git a/src/backend/langflow/database/models/token.py b/src/backend/langflow/database/models/token.py deleted file mode 100644 index 68c70f07f..000000000 --- a/src/backend/langflow/database/models/token.py +++ /dev/null @@ -1,7 +0,0 @@ -from pydantic import BaseModel - - -class Token(BaseModel): - access_token: str - refresh_token: str - token_type: str diff --git a/src/backend/langflow/database/models/user.py b/src/backend/langflow/database/models/user.py deleted file mode 100644 index 94ceb4e15..000000000 --- a/src/backend/langflow/database/models/user.py +++ /dev/null @@ -1,94 +0,0 @@ -from sqlmodel import Field -from uuid import UUID, uuid4 -from pydantic 
import BaseModel -from typing import Optional, List -from sqlalchemy.orm import Session -from datetime import timezone, datetime -from sqlalchemy.exc import IntegrityError -from fastapi import HTTPException, Depends - -from langflow.services.utils import get_session -from langflow.services.database.models.base import SQLModelSerializable, SQLModel - - -class User(SQLModelSerializable, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - username: str = Field(index=True, unique=True) - password: str = Field() - is_active: bool = Field(default=False) - is_superuser: bool = Field(default=False) - create_at: datetime = Field(default_factory=datetime.utcnow) - updated_at: datetime = Field(default_factory=datetime.utcnow) - last_login_at: Optional[datetime] = Field() - - -class UserAddModel(SQLModel): - username: str = Field() - password: str = Field() - - -class UserListModel(SQLModel): - id: UUID = Field(default_factory=uuid4) - username: str = Field() - is_active: bool = Field() - is_superuser: bool = Field() - create_at: datetime = Field() - updated_at: datetime = Field() - last_login_at: Optional[datetime] = Field() - - -class UserPatchModel(SQLModel): - username: Optional[str] = Field() - is_active: Optional[bool] = Field() - is_superuser: Optional[bool] = Field() - last_login_at: Optional[datetime] = Field() - - -class UsersResponse(BaseModel): - total_count: int - users: List[UserListModel] - - -def get_user_by_username(db: Session, username: str) -> User: - db_user = db.query(User).filter(User.username == username).first() - return User.from_orm(db_user) if db_user else None # type: ignore - - -def get_user_by_id(db: Session, id: UUID) -> User: - db_user = db.query(User).filter(User.id == id).first() - return User.from_orm(db_user) if db_user else None # type: ignore - - -def update_user( - user_id: UUID, user: UserPatchModel, db: Session = Depends(get_session) -) -> User: - user_db = get_user_by_username(db, user.username) # 
type: ignore - if user_db and user_db.id != user_id: - raise HTTPException(status_code=409, detail="Username already exists") - - user_db = get_user_by_id(db, user_id) - if not user_db: - raise HTTPException(status_code=404, detail="User not found") - - try: - user_data = user.dict(exclude_unset=True) - for key, value in user_data.items(): - setattr(user_db, key, value) - - user_db.updated_at = datetime.now(timezone.utc) - user_db = db.merge(user_db) - db.commit() - if db.identity_key(instance=user_db) is not None: - db.refresh(user_db) - - except IntegrityError as e: - db.rollback() - raise HTTPException(status_code=400, detail=str(e)) from e - - return user_db - - -def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)): - user_data = UserPatchModel(last_login_at=datetime.now(timezone.utc)) # type: ignore - - return update_user(user_id, user_data, db) diff --git a/src/backend/langflow/field_typing/__init__.py b/src/backend/langflow/field_typing/__init__.py new file mode 100644 index 000000000..927716b11 --- /dev/null +++ b/src/backend/langflow/field_typing/__init__.py @@ -0,0 +1,3 @@ +from .base import NestedDict + +__all__ = ["NestedDict"] diff --git a/src/backend/langflow/field_typing/base.py b/src/backend/langflow/field_typing/base.py new file mode 100644 index 000000000..ed3219888 --- /dev/null +++ b/src/backend/langflow/field_typing/base.py @@ -0,0 +1,4 @@ +from typing import Union, Dict + +# Type alias for more complex dicts +NestedDict = Dict[str, Union[str, Dict]] diff --git a/src/backend/langflow/graph/edge/base.py b/src/backend/langflow/graph/edge/base.py index 569d33ec0..2c60b0288 100644 --- a/src/backend/langflow/graph/edge/base.py +++ b/src/backend/langflow/graph/edge/base.py @@ -1,4 +1,4 @@ -from langflow.utils.logger import logger +from loguru import logger from typing import TYPE_CHECKING if TYPE_CHECKING: @@ -17,6 +17,17 @@ class Edge: self.validate_edge() + def __setstate__(self, state): + self.source = state["source"] + 
self.target = state["target"] + self.target_param = state["target_param"] + self.source_handle = state["source_handle"] + self.target_handle = state["target_handle"] + + def reset(self) -> None: + self.source._build_params() + self.target._build_params() + def validate_edge(self) -> None: # Validate that the outputs of the source node are valid inputs # for the target node @@ -40,7 +51,6 @@ class Edge: if no_matched_type: logger.debug(self.source_types) logger.debug(self.target_reqs) - if no_matched_type: raise ValueError( f"Edge between {self.source.vertex_type} and {self.target.vertex_type} " f"has no matched type" diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index f0d3986cf..227b04bf9 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -10,7 +10,7 @@ from langflow.graph.vertex.types import ( ) from langflow.interface.tools.constants import FILE_TOOLS from langflow.utils import payload -from langflow.utils.logger import logger +from loguru import logger from langchain.chains.base import Chain @@ -26,6 +26,12 @@ class Graph: self._edges = edges self._build_graph() + def __setstate__(self, state): + self.__dict__.update(state) + for edge in self.edges: + edge.reset() + edge.validate_edge() + @classmethod def from_payload(cls, payload: Dict) -> "Graph": """ @@ -48,6 +54,11 @@ class Graph: f"Invalid payload. Expected keys 'nodes' and 'edges'. 
Found {list(payload.keys())}" ) from exc + def __eq__(self, other: object) -> bool: + if not isinstance(other, Graph): + return False + return self.__repr__() == other.__repr__() + def _build_graph(self) -> None: """Builds the graph from the nodes and edges.""" self.nodes = self._build_vertices() @@ -144,10 +155,10 @@ class Graph: return list(reversed(sorted_vertices)) - def generator_build(self) -> Generator: + def generator_build(self) -> Generator[Vertex, None, None]: """Builds each vertex in the graph and yields it.""" sorted_vertices = self.topological_sort() - logger.debug("Sorted vertices: %s", sorted_vertices) + logger.debug("There are %s vertices in the graph", len(sorted_vertices)) yield from sorted_vertices def get_node_neighbors(self, node: Vertex) -> Dict[Vertex, int]: diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index 425f66315..51fa8d815 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -1,9 +1,11 @@ import ast +import pickle from langflow.graph.utils import UnbuiltObject +from langflow.graph.vertex.utils import is_basic_type from langflow.interface.initialize import loading from langflow.interface.listing import lazy_load_dict from langflow.utils.constants import DIRECT_TYPES -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import sync_to_async @@ -12,12 +14,19 @@ import types from typing import Any, Dict, List, Optional from typing import TYPE_CHECKING + if TYPE_CHECKING: from langflow.graph.edge.base import Edge class Vertex: - def __init__(self, data: Dict, base_type: Optional[str] = None) -> None: + def __init__( + self, + data: Dict, + base_type: Optional[str] = None, + is_task: bool = False, + params: Optional[Dict] = None, + ) -> None: self.id: str = data["id"] self._data = data self.edges: List["Edge"] = [] @@ -26,6 +35,59 @@ class Vertex: self._built_object = UnbuiltObject() self._built = 
False self.artifacts: Dict[str, Any] = {} + self.task_id: Optional[str] = None + self.is_task = is_task + self.params = params or {} + + def reset_params(self): + for edge in self.edges: + if edge.source != self: + target_param = edge.target_param + if target_param in ["document", "texts"]: + # this means they got data and have already ingested it + # so we continue after removing the param + self.params.pop(target_param, None) + continue + + if target_param in self.params and not is_basic_type( + self.params[target_param] + ): + # edge.source.params = {} + edge.source._build_params() + edge.source._built_object = UnbuiltObject() + edge.source._built = False + + self.params[target_param] = edge.source + + def __getstate__(self): + state_dict = self.__dict__.copy() + try: + # try pickling the built object + # if it fails, then we need to delete it + # and build it again + pickle.dumps(state_dict["_built_object"]) + except Exception: + self.reset_params() + del state_dict["_built_object"] + del state_dict["_built"] + return state_dict + + def __setstate__(self, state): + self._data = state["_data"] + self.params = state["params"] + self.base_type = state["base_type"] + self.is_task = state["is_task"] + self.edges = state["edges"] + self.id = state["id"] + self._parse_data() + if "_built_object" in state: + self._built_object = state["_built_object"] + self._built = state["_built"] + else: + self._built_object = UnbuiltObject() + self._built = False + self.artifacts: Dict[str, Any] = {} + self.task_id: Optional[str] = None def _parse_data(self) -> None: self.data = self._data["data"] @@ -68,6 +130,13 @@ class Vertex: self.base_type = base_type break + def get_task(self): + # using the task_id, get the task from celery + # and return it + from celery.result import AsyncResult # type: ignore + + return AsyncResult(self.task_id) + def _build_params(self): # sourcery skip: merge-list-append, remove-redundant-if # Some params are required, some are optional @@ -89,9 
+158,11 @@ class Vertex: for key, value in self.data["node"]["template"].items() if isinstance(value, dict) } - params = {} + params = self.params.copy() if self.params else {} for edge in self.edges: + if not hasattr(edge, "target_param"): + continue param_key = edge.target_param if param_key in template_dict: if template_dict[param_key]["list"]: @@ -102,6 +173,8 @@ class Vertex: params[param_key] = edge.source for key, value in template_dict.items(): + if key in params: + continue # Skip _type and any value that has show == False and is not code # If we don't want to show code but we want to use it if key == "_type" or (not value.get("show") and key != "code"): @@ -112,9 +185,10 @@ class Vertex: # Load the type in value.get('suffixes') using # what is inside value.get('content') # value.get('value') is the file name - file_path = value.get("file_path") - - params[key] = file_path + if file_path := value.get("file_path"): + params[key] = file_path + else: + raise ValueError(f"File path not found for {self.vertex_type}") elif value.get("type") in DIRECT_TYPES and params.get(key) is None: if value.get("type") == "code": try: @@ -122,6 +196,19 @@ class Vertex: except Exception as exc: logger.debug(f"Error parsing code: {exc}") params[key] = value.get("value") + elif value.get("type") in ["dict", "NestedDict"]: + # When dict comes from the frontend it comes as a + # list of dicts, so we need to convert it to a dict + # before passing it to the build method + _value = value.get("value") + if isinstance(_value, list): + params[key] = { + k: v + for item in value.get("value", []) + for k, v in item.items() + } + elif isinstance(_value, dict): + params[key] = _value else: params[key] = value.get("value") @@ -131,20 +218,21 @@ class Vertex: else: params.pop(key, None) # Add _type to params + self._raw_params = params self.params = params - def _build(self): + def _build(self, user_id=None): """ Initiate the build process. 
""" logger.debug(f"Building {self.vertex_type}") - self._build_each_node_in_params_dict() - self._get_and_instantiate_class() + self._build_each_node_in_params_dict(user_id) + self._get_and_instantiate_class(user_id) self._validate_built_object() self._built = True - def _build_each_node_in_params_dict(self): + def _build_each_node_in_params_dict(self, user_id=None): """ Iterates over each node in the params dictionary and builds it. """ @@ -153,9 +241,9 @@ class Vertex: if value == self: del self.params[key] continue - self._build_node_and_update_params(key, value) + self._build_node_and_update_params(key, value, user_id) elif isinstance(value, list) and self._is_list_of_nodes(value): - self._build_list_of_nodes_and_update_params(key, value) + self._build_list_of_nodes_and_update_params(key, value, user_id) def _is_node(self, value): """ @@ -169,23 +257,45 @@ class Vertex: """ return all(self._is_node(node) for node in value) - def _build_node_and_update_params(self, key, node): + def get_result(self, user_id=None, timeout=None) -> Any: + # Check if the Vertex was built already + if self._built: + return self._built_object + + if self.is_task and self.task_id is not None: + task = self.get_task() + result = task.get(timeout=timeout) + if result is not None: # If result is ready + self._update_built_object_and_artifacts(result) + return self._built_object + else: + # Handle the case when the result is not ready (retry, throw exception, etc.) + pass + + # If there's no task_id, build the vertex locally + self.build(user_id) + return self._built_object + + def _build_node_and_update_params(self, key, node, user_id=None): """ Builds a given node and updates the params dictionary accordingly. 
""" - result = node.build() + + result = node.get_result(user_id) self._handle_func(key, result) if isinstance(result, list): self._extend_params_list_with_result(key, result) self.params[key] = result - def _build_list_of_nodes_and_update_params(self, key, nodes): + def _build_list_of_nodes_and_update_params( + self, key, nodes: List["Vertex"], user_id=None + ): """ Iterates over a list of nodes, builds each and updates the params dictionary. """ self.params[key] = [] for node in nodes: - built = node.build() + built = node.get_result(user_id) if isinstance(built, list): if key not in self.params: self.params[key] = [] @@ -215,7 +325,7 @@ class Vertex: if isinstance(self.params[key], list): self.params[key].extend(result) - def _get_and_instantiate_class(self): + def _get_and_instantiate_class(self, user_id=None): """ Gets the class from a dictionary and instantiates it with the params. """ @@ -226,9 +336,11 @@ class Vertex: node_type=self.vertex_type, base_type=self.base_type, params=self.params, + user_id=user_id, ) self._update_built_object_and_artifacts(result) except Exception as exc: + logger.exception(exc) raise ValueError( f"Error building node {self.vertex_type}: {str(exc)}" ) from exc @@ -255,9 +367,9 @@ class Vertex: raise ValueError(message) - def build(self, force: bool = False) -> Any: + def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: if not self._built or force: - self._build() + self._build(user_id, *args, **kwargs) return self._built_object @@ -269,7 +381,10 @@ class Vertex: return f"Vertex(id={self.id}, data={self.data})" def __eq__(self, __o: object) -> bool: - return self.id == __o.id if isinstance(__o, Vertex) else False + try: + return self.id == __o.id if isinstance(__o, Vertex) else False + except AttributeError: + return False def __hash__(self) -> int: return id(self) diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 9a2dc21c5..7609982a5 100644 --- 
a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -7,55 +7,68 @@ from langflow.interface.utils import extract_input_variables_from_prompt class AgentVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="agents") + def __init__(self, data: Dict, params: Optional[Dict] = None): + super().__init__(data, base_type="agents", params=params) self.tools: List[Union[ToolkitVertex, ToolVertex]] = [] self.chains: List[ChainVertex] = [] + def __getstate__(self): + state = super().__getstate__() + state["tools"] = self.tools + state["chains"] = self.chains + return state + + def __setstate__(self, state): + self.tools = state["tools"] + self.chains = state["chains"] + super().__setstate__(state) + def _set_tools_and_chains(self) -> None: for edge in self.edges: + if not hasattr(edge, "source"): + continue source_node = edge.source if isinstance(source_node, (ToolVertex, ToolkitVertex)): self.tools.append(source_node) elif isinstance(source_node, ChainVertex): self.chains.append(source_node) - def build(self, force: bool = False) -> Any: + def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: if not self._built or force: self._set_tools_and_chains() # First, build the tools for tool_node in self.tools: - tool_node.build() + tool_node.build(user_id=user_id) # Next, build the chains and the rest for chain_node in self.chains: - chain_node.build(tools=self.tools) + chain_node.build(tools=self.tools, user_id=user_id) - self._build() + self._build(user_id=user_id) return self._built_object class ToolVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="tools") + def __init__(self, data: Dict, params: Optional[Dict] = None): + super().__init__(data, base_type="tools", params=params) class LLMVertex(Vertex): built_node_type = None class_built_object = None - def __init__(self, data: Dict): - super().__init__(data, base_type="llms") + def __init__(self, 
data: Dict, params: Optional[Dict] = None): + super().__init__(data, base_type="llms", params=params) - def build(self, force: bool = False) -> Any: + def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: # LLM is different because some models might take up too much memory # or time to load. So we only load them when we need them.รŸ if self.vertex_type == self.built_node_type: return self.class_built_object if not self._built or force: - self._build() + self._build(user_id=user_id) self.built_node_type = self.vertex_type self.class_built_object = self._built_object # Avoid deepcopying the LLM @@ -64,39 +77,41 @@ class LLMVertex(Vertex): class ToolkitVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="toolkits") + def __init__(self, data: Dict, params=None): + super().__init__(data, base_type="toolkits", params=params) class FileToolVertex(ToolVertex): - def __init__(self, data: Dict): - super().__init__(data) + def __init__(self, data: Dict, params=None): + super().__init__(data, params=params) class WrapperVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="wrappers") - def build(self, force: bool = False) -> Any: + def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any: if not self._built or force: if "headers" in self.params: self.params["headers"] = ast.literal_eval(self.params["headers"]) - self._build() + self._build(user_id=user_id) return self._built_object class DocumentLoaderVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="documentloaders") + def __init__(self, data: Dict, params: Optional[Dict] = None): + super().__init__(data, base_type="documentloaders", params=params) def _built_object_repr(self): # This built_object is a list of documents. Maybe we should # show how many documents are in the list? 
if self._built_object: - avg_length = sum(len(doc.page_content) for doc in self._built_object) / len( - self._built_object - ) + avg_length = sum( + len(doc.page_content) + for doc in self._built_object + if hasattr(doc, "page_content") + ) / len(self._built_object) return f"""{self.vertex_type}({len(self._built_object)} documents) \nAvg. Document Length (characters): {int(avg_length)} Documents: {self._built_object[:3]}...""" @@ -104,14 +119,51 @@ class DocumentLoaderVertex(Vertex): class EmbeddingVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="embeddings") + def __init__(self, data: Dict, params: Optional[Dict] = None): + super().__init__(data, base_type="embeddings", params=params) class VectorStoreVertex(Vertex): - def __init__(self, data: Dict): + def __init__(self, data: Dict, params=None): super().__init__(data, base_type="vectorstores") + self.params = params or {} + + # VectorStores may contain databse connections + # so we need to define the __reduce__ method and the __setstate__ method + # to avoid pickling errors + def clean_edges_for_pickling(self): + # for each edge that has self as source + # we need to clear the _built_object of the target + # so that we don't try to pickle a database connection + for edge in self.edges: + if edge.source == self: + edge.target._built_object = None + edge.target._built = False + edge.target.params[edge.target_param] = self + + def remove_docs_and_texts_from_params(self): + # remove documents and texts from params + # so that we don't try to pickle a database connection + self.params.pop("documents", None) + self.params.pop("texts", None) + + def __getstate__(self): + # We want to save the params attribute + # and if "documents" or "texts" are in the params + # we want to remove them because they have already + # been processed. 
+ params = self.params.copy() + params.pop("documents", None) + params.pop("texts", None) + self.clean_edges_for_pickling() + + return super().__getstate__() + + def __setstate__(self, state): + super().__setstate__(state) + self.remove_docs_and_texts_from_params() + class MemoryVertex(Vertex): def __init__(self, data: Dict): @@ -124,8 +176,8 @@ class RetrieverVertex(Vertex): class TextSplitterVertex(Vertex): - def __init__(self, data: Dict): - super().__init__(data, base_type="textsplitters") + def __init__(self, data: Dict, params: Optional[Dict] = None): + super().__init__(data, base_type="textsplitters", params=params) def _built_object_repr(self): # This built_object is a list of documents. Maybe we should @@ -148,16 +200,19 @@ class ChainVertex(Vertex): def build( self, force: bool = False, - tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None, + user_id=None, + *args, + **kwargs, ) -> Any: if not self._built or force: # Check if the chain requires a PromptVertex for key, value in self.params.items(): if isinstance(value, PromptVertex): # Build the PromptVertex, passing the tools if available + tools = kwargs.get("tools", None) self.params[key] = value.build(tools=tools, force=force) - self._build() + self._build(user_id=user_id) return self._built_object @@ -169,7 +224,10 @@ class PromptVertex(Vertex): def build( self, force: bool = False, + user_id=None, tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None, + *args, + **kwargs, ) -> Any: if not self._built or force: if ( @@ -180,7 +238,7 @@ class PromptVertex(Vertex): # Check if it is a ZeroShotPrompt and needs a tool if "ShotPrompt" in self.vertex_type: tools = ( - [tool_node.build() for tool_node in tools] + [tool_node.build(user_id=user_id) for tool_node in tools] if tools is not None else [] ) @@ -205,10 +263,10 @@ class PromptVertex(Vertex): self.params["input_variables"] = list( set(self.params["input_variables"]) ) - else: + elif isinstance(self.params, dict): 
self.params.pop("input_variables", None) - self._build() + self._build(user_id=user_id) return self._built_object def _built_object_repr(self): @@ -252,8 +310,13 @@ class OutputParserVertex(Vertex): class CustomComponentVertex(Vertex): def __init__(self, data: Dict): - super().__init__(data, base_type="custom_components") + super().__init__(data, base_type="custom_components", is_task=True) def _built_object_repr(self): + if self.task_id and self.is_task: + if task := self.get_task(): + return str(task.info) + else: + return f"Task {self.task_id} is not running" if self.artifacts and "repr" in self.artifacts: return self.artifacts["repr"] or super()._built_object_repr() diff --git a/src/backend/langflow/graph/vertex/utils.py b/src/backend/langflow/graph/vertex/utils.py new file mode 100644 index 000000000..e1d439f4b --- /dev/null +++ b/src/backend/langflow/graph/vertex/utils.py @@ -0,0 +1,5 @@ +from langflow.utils.constants import PYTHON_BASIC_TYPES + + +def is_basic_type(obj): + return type(obj) in PYTHON_BASIC_TYPES diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/langflow/interface/agents/base.py index 16295f455..74f07cd1e 100644 --- a/src/backend/langflow/interface/agents/base.py +++ b/src/backend/langflow/interface/agents/base.py @@ -5,10 +5,10 @@ from langchain.agents import types from langflow.custom.customs import get_custom_nodes from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.base import LangChainTypeCreator -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.agents import AgentFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -54,7 +54,7 @@ class AgentCreator(LangChainTypeCreator): # Now this is a generator def to_list(self) -> List[str]: names = [] - settings_manager = 
get_settings_manager() + settings_service = get_settings_service() for _, agent in self.type_to_loader_dict.items(): agent_name = ( agent.function_name() @@ -62,8 +62,8 @@ class AgentCreator(LangChainTypeCreator): else agent.__name__ ) if ( - agent_name in settings_manager.settings.AGENTS - or settings_manager.settings.DEV + agent_name in settings_service.settings.AGENTS + or settings_service.settings.DEV ): names.append(agent_name) return names diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py index d1ed83b5a..13dd05619 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -2,13 +2,13 @@ from abc import ABC, abstractmethod from typing import Any, Dict, List, Optional, Type, Union from langchain.chains.base import Chain from langchain.agents import AgentExecutor -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from pydantic import BaseModel from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -from langflow.utils.logger import logger +from loguru import logger # Assuming necessary imports for Field, Template, and FrontendNode classes @@ -27,11 +27,11 @@ class LangChainTypeCreator(BaseModel, ABC): @property def docs_map(self) -> Dict[str, str]: """A dict with the name of the component as key and the documentation link as value.""" - settings_manager = get_settings_manager() + settings_service = get_settings_service() if self.name_docs_dict is None: try: type_settings = getattr( - settings_manager.settings, self.type_name.upper() + settings_service.settings, self.type_name.upper() ) self.name_docs_dict = { name: value_dict["documentation"] diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py index d847ca66d..5bdc3efd8 100644 --- 
a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/langflow/interface/chains/base.py @@ -3,10 +3,10 @@ from typing import Any, ClassVar, Dict, List, Optional, Type from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.chains import ChainFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method from langchain import chains @@ -30,7 +30,7 @@ class ChainCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: - settings_manager = get_settings_manager() + settings_service = get_settings_service() self.type_dict: dict[str, Any] = { chain_name: import_class(f"langchain.chains.{chain_name}") for chain_name in chains.__all__ @@ -44,8 +44,8 @@ class ChainCreator(LangChainTypeCreator): self.type_dict = { name: chain for name, chain in self.type_dict.items() - if name in settings_manager.settings.CHAINS - or settings_manager.settings.DEV + if name in settings_service.settings.CHAINS + or settings_service.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/langflow/interface/custom/base.py index 06e874fa7..45a3ea215 100644 --- a/src/backend/langflow/interface/custom/base.py +++ b/src/backend/langflow/interface/custom/base.py @@ -8,7 +8,7 @@ from langflow.interface.custom.custom_component import CustomComponent from langflow.template.frontend_node.custom_components import ( CustomComponentFrontendNode, ) -from langflow.utils.logger import logger +from loguru import logger # Assuming necessary imports for Field, Template, and FrontendNode classes diff --git 
a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 308dac614..0fd57558d 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -1,9 +1,15 @@ +<<<<<<< HEAD from typing import Any, Callable, ClassVar, Dict, List, Optional +======= +from typing import Any, Callable, List, Optional, Union +from uuid import UUID +>>>>>>> origin/dev from fastapi import HTTPException from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES from langflow.interface.custom.component import Component from langflow.interface.custom.directory_reader import DirectoryReader -from langflow.services.utils import get_db_manager +from langflow.services.getters import get_db_service +from langflow.interface.custom.utils import extract_inner_type from langflow.utils import validate @@ -19,10 +25,16 @@ class CustomComponent(Component, extra=Extra.allow): code_class_base_inheritance: ClassVar[Dict] = "CustomComponent" function_entrypoint_name: ClassVar[Dict] = "build" function: Optional[Callable] = None +<<<<<<< HEAD return_type_valid_list: ClassVar[Dict] = list( CUSTOM_COMPONENT_SUPPORTED_TYPES.keys() ) repr_value: Optional[str] = "" +======= + return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys()) + repr_value: Optional[Any] = "" + user_id: Optional[Union[UUID, str]] = None +>>>>>>> origin/dev def __init__(self, **data): super().__init__(**data) @@ -94,7 +106,20 @@ class CustomComponent(Component, extra=Extra.allow): build_method = build_methods[0] - return build_method["args"] + args = build_method["args"] + for arg in args: + if arg.get("type") == "prompt": + raise HTTPException( + status_code=400, + detail={ + "error": "Type hint Error", + "traceback": ( + "Prompt type is not supported in the build method." + " Try using PromptTemplate instead." 
+ ), + }, + ) + return args @property def get_function_entrypoint_return_type(self) -> List[str]: @@ -125,6 +150,10 @@ class CustomComponent(Component, extra=Extra.allow): return_type = build_method["return_type"] if not return_type: return [] + # If list or List is in the return type, then we remove it and return the inner type + if return_type.startswith("list") or return_type.startswith("List"): + return_type = extract_inner_type(return_type) + # If the return type is not a Union, then we just return it as a list if "Union" not in return_type: return [return_type] if return_type in self.return_type_valid_list else [] @@ -171,24 +200,29 @@ class CustomComponent(Component, extra=Extra.allow): return validate.create_function(self.code, self.function_entrypoint_name) def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any: - from langflow.processing.process import build_sorted_vertices_with_caching + from langflow.processing.process import build_sorted_vertices from langflow.processing.process import process_tweaks - db_manager = get_db_manager() - with session_getter(db_manager) as session: + db_service = get_db_service() + with session_getter(db_service) as session: graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None if not graph_data: raise ValueError(f"Flow {flow_id} not found") if tweaks: graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) - return build_sorted_vertices_with_caching(graph_data) + return build_sorted_vertices(graph_data) def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]: - get_session = get_session or session_getter - db_manager = get_db_manager() - with get_session(db_manager) as session: - flows = session.query(Flow).all() - return flows + if not self.user_id: + raise ValueError("Session is invalid") + try: + get_session = get_session or session_getter + db_service = get_db_service() + with get_session(db_service) as session: + flows = 
session.query(Flow).filter(Flow.user_id == self.user_id).all() + return flows + except Exception as e: + raise ValueError("Session is invalid") from e def get_flow( self, @@ -199,12 +233,16 @@ class CustomComponent(Component, extra=Extra.allow): get_session: Optional[Callable] = None, ) -> Flow: get_session = get_session or session_getter - db_manager = get_db_manager() - with get_session(db_manager) as session: + db_service = get_db_service() + with get_session(db_service) as session: if flow_id: flow = session.query(Flow).get(flow_id) elif flow_name: - flow = session.query(Flow).filter(Flow.name == flow_name).first() + flow = ( + session.query(Flow) + .filter(Flow.name == flow_name) + .filter(Flow.user_id == self.user_id) + ).first() else: raise ValueError("Either flow_name or flow_id must be provided") diff --git a/src/backend/langflow/interface/custom/directory_reader.py b/src/backend/langflow/interface/custom/directory_reader.py index 7bff7b5f5..01b11a4a6 100644 --- a/src/backend/langflow/interface/custom/directory_reader.py +++ b/src/backend/langflow/interface/custom/directory_reader.py @@ -1,7 +1,7 @@ import os import ast import zlib -from langflow.utils.logger import logger +from loguru import logger class CustomComponentPathValueError(ValueError): @@ -77,7 +77,7 @@ class DirectoryReader: ] filtered = [menu for menu in items if menu["components"]] logger.debug( - f'Filtered components {"with errors" if with_errors else ""}: {filtered}' + f'Filtered components {"with errors" if with_errors else ""}: {len(filtered)}' ) return {"menu": filtered} diff --git a/src/backend/langflow/interface/custom/utils.py b/src/backend/langflow/interface/custom/utils.py new file mode 100644 index 000000000..99b0d4bc6 --- /dev/null +++ b/src/backend/langflow/interface/custom/utils.py @@ -0,0 +1,10 @@ +import re + + +def extract_inner_type(return_type: str) -> str: + """ + Extracts the inner type from a type hint that is a list. 
+ """ + if match := re.match(r"list\[(.*)\]", return_type, re.IGNORECASE): + return match[1] + return return_type diff --git a/src/backend/langflow/interface/document_loaders/base.py b/src/backend/langflow/interface/document_loaders/base.py index db0832ff3..e2c379b67 100644 --- a/src/backend/langflow/interface/document_loaders/base.py +++ b/src/backend/langflow/interface/document_loaders/base.py @@ -1,11 +1,11 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode from langflow.interface.custom_lists import documentloaders_type_to_cls_dict -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -31,12 +31,12 @@ class DocumentLoaderCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ documentloader.__name__ for documentloader in self.type_to_loader_dict.values() - if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS - or settings_manager.settings.DEV + if documentloader.__name__ in settings_service.settings.DOCUMENTLOADERS + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/embeddings/base.py b/src/backend/langflow/interface/embeddings/base.py index 169985d37..d280cf1c1 100644 --- a/src/backend/langflow/interface/embeddings/base.py +++ b/src/backend/langflow/interface/embeddings/base.py @@ -2,11 +2,11 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import embedding_type_to_cls_dict -from langflow.services.utils import get_settings_manager +from langflow.services.getters import 
get_settings_service from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -33,12 +33,12 @@ class EmbeddingCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ embedding.__name__ for embedding in self.type_to_loader_dict.values() - if embedding.__name__ in settings_manager.settings.EMBEDDINGS - or settings_manager.settings.DEV + if embedding.__name__ in settings_service.settings.EMBEDDINGS + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index d07222dd1..000eed205 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -144,6 +144,8 @@ def import_chain(chain: str) -> Type[Chain]: if chain in CUSTOM_CHAINS: return CUSTOM_CHAINS[chain] + if chain == "SQLDatabaseChain": + return import_class("langchain_experimental.sql.SQLDatabaseChain") return import_class(f"langchain.chains.{chain}") diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index c589aaae2..39677db52 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -1,6 +1,7 @@ import json -from typing import Any, Callable, Dict, Sequence, Type - +import orjson +from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING +from langchain.schema import Document from langchain.agents import agent as agent_module from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.base import BaseToolkit @@ -33,13 +34,29 @@ from langflow.utils import validate 
from langchain.chains.base import Chain from langchain.vectorstores.base import VectorStore from langchain.document_loaders.base import BaseLoader -from langflow.utils.logger import logger +from loguru import logger + +if TYPE_CHECKING: + from langflow import CustomComponent -def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: +def build_vertex_in_params(params: Dict) -> Dict: + from langflow.graph.vertex.base import Vertex + + # If any of the values in params is a Vertex, we will build it + return { + key: value.build() if isinstance(value, Vertex) else value + for key, value in params.items() + } + + +def instantiate_class( + node_type: str, base_type: str, params: Dict, user_id=None +) -> Any: """Instantiate class from module type and key, and params""" params = convert_params_to_sets(params) params = convert_kwargs(params) + if node_type in CUSTOM_NODES: if custom_node := CUSTOM_NODES.get(node_type): if hasattr(custom_node, "initialize"): @@ -47,7 +64,9 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: return custom_node(**params) logger.debug(f"Instantiating {node_type} of type {base_type}") class_object = import_by_type(_type=base_type, name=node_type) - return instantiate_based_on_type(class_object, base_type, node_type, params) + return instantiate_based_on_type( + class_object, base_type, node_type, params, user_id=user_id + ) def convert_params_to_sets(params): @@ -66,7 +85,7 @@ def convert_kwargs(params): for key in kwargs_keys: if isinstance(params[key], str): try: - params[key] = json.loads(params[key]) + params[key] = orjson.loads(params[key]) except json.JSONDecodeError: # if the string is not a valid json string, we will # remove the key from the params @@ -74,7 +93,7 @@ def convert_kwargs(params): return params -def instantiate_based_on_type(class_object, base_type, node_type, params): +def instantiate_based_on_type(class_object, base_type, node_type, params, user_id): if base_type == "agents": 
return instantiate_agent(node_type, class_object, params) elif base_type == "prompts": @@ -108,19 +127,19 @@ def instantiate_based_on_type(class_object, base_type, node_type, params): elif base_type == "memory": return instantiate_memory(node_type, class_object, params) elif base_type == "custom_components": - return instantiate_custom_component(node_type, class_object, params) + return instantiate_custom_component(node_type, class_object, params, user_id) elif base_type == "wrappers": return instantiate_wrapper(node_type, class_object, params) else: return class_object(**params) -def instantiate_custom_component(node_type, class_object, params): +def instantiate_custom_component(node_type, class_object, params, user_id): # we need to make a copy of the params because we will be # modifying it params_copy = params.copy() - class_object = get_function_custom(params_copy.pop("code")) - custom_component = class_object() + class_object: "CustomComponent" = get_function_custom(params_copy.pop("code")) + custom_component = class_object(user_id=user_id) built_object = custom_component.build(**params_copy) return built_object, {"repr": custom_component.custom_repr()} @@ -281,6 +300,13 @@ def instantiate_embedding(node_type, class_object, params: Dict): def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict): search_kwargs = params.pop("search_kwargs", {}) + # clean up docs or texts to have only documents + if "texts" in params: + params["documents"] = params.pop("texts") + if "documents" in params: + params["documents"] = [ + doc for doc in params["documents"] if isinstance(doc, Document) + ] if initializer := vecstore_initializer.get(class_object.__name__): vecstore = initializer(class_object, params) else: @@ -310,7 +336,7 @@ def instantiate_documentloader(class_object: Type[BaseLoader], params: Dict): metadata = params.pop("metadata", None) if metadata and isinstance(metadata, str): try: - metadata = json.loads(metadata) + metadata = 
orjson.loads(metadata) except json.JSONDecodeError as exc: raise ValueError( "The metadata you provided is not a valid JSON string." diff --git a/src/backend/langflow/interface/initialize/utils.py b/src/backend/langflow/interface/initialize/utils.py index ceb8a53a1..199626de5 100644 --- a/src/backend/langflow/interface/initialize/utils.py +++ b/src/backend/langflow/interface/initialize/utils.py @@ -1,5 +1,7 @@ import contextlib import json +from langflow.services.database.models.base import orjson_dumps +import orjson from typing import Any, Dict, List from langchain.agents import ZeroShotAgent @@ -95,9 +97,11 @@ def format_content(variable): def try_to_load_json(content): with contextlib.suppress(json.JSONDecodeError): - content = json.loads(content) + content = orjson.loads(content) if isinstance(content, list): content = ",".join([str(item) for item in content]) + else: + content = orjson_dumps(content) return content diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index 1bc2d73e0..59e7c0d3b 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -1,4 +1,3 @@ -import json from typing import Any, Callable, Dict, Type from langchain.vectorstores import ( Pinecone, @@ -9,9 +8,11 @@ from langchain.vectorstores import ( SupabaseVectorStore, MongoDBAtlasVectorSearch, ) - +from langchain.schema import Document import os +import orjson + def docs_in_params(params: dict) -> bool: """Check if params has documents OR texts and one of them is not an empty list, @@ -92,7 +93,7 @@ def initialize_weaviate(class_object: Type[Weaviate], params: dict): import weaviate # type: ignore client_kwargs_json = params.get("client_kwargs", "{}") - client_kwargs = json.loads(client_kwargs_json) + client_kwargs = orjson.loads(client_kwargs_json) client_params = { "url": params.get("weaviate_url"), } @@ -200,11 +201,16 @@ def 
initialize_chroma(class_object: Type[Chroma], params: dict): if "texts" in params: params["documents"] = params.pop("texts") for doc in params["documents"]: + if not isinstance(doc, Document): + # remove any non-Document objects from the list + params["documents"].remove(doc) + continue if doc.metadata is None: doc.metadata = {} for key, value in doc.metadata.items(): if value is None: doc.metadata[key] = "" + chromadb = class_object.from_documents(**params) if persist: chromadb.persist() diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py index 1cab1efbc..aa72e568e 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/langflow/interface/listing.py @@ -1,19 +1,4 @@ -from langflow.interface.agents.base import agent_creator -from langflow.interface.chains.base import chain_creator -from langflow.interface.document_loaders.base import documentloader_creator -from langflow.interface.embeddings.base import embedding_creator -from langflow.interface.llms.base import llm_creator -from langflow.interface.memories.base import memory_creator -from langflow.interface.prompts.base import prompt_creator -from langflow.interface.text_splitters.base import textsplitter_creator -from langflow.interface.toolkits.base import toolkits_creator -from langflow.interface.tools.base import tool_creator -from langflow.interface.utilities.base import utility_creator -from langflow.interface.vector_store.base import vectorstore_creator -from langflow.interface.wrappers.base import wrapper_creator -from langflow.interface.output_parsers.base import output_parser_creator -from langflow.interface.retrievers.base import retriever_creator -from langflow.interface.custom.base import custom_component_creator +from langflow.services.getters import get_settings_service from langflow.utils.lazy_load import LazyLoadDictBase @@ -33,24 +18,10 @@ class AllTypesDict(LazyLoadDictBase): } def get_type_dict(self): - return { - "agents": 
agent_creator.to_list(), - "prompts": prompt_creator.to_list(), - "llms": llm_creator.to_list(), - "tools": tool_creator.to_list(), - "chains": chain_creator.to_list(), - "memory": memory_creator.to_list(), - "toolkits": toolkits_creator.to_list(), - "wrappers": wrapper_creator.to_list(), - "documentLoaders": documentloader_creator.to_list(), - "vectorStore": vectorstore_creator.to_list(), - "embeddings": embedding_creator.to_list(), - "textSplitters": textsplitter_creator.to_list(), - "utilities": utility_creator.to_list(), - "outputParsers": output_parser_creator.to_list(), - "retrievers": retriever_creator.to_list(), - "custom_components": custom_component_creator.to_list(), - } + from langflow.interface.types import get_all_types_dict + + settings_service = get_settings_service() + return get_all_types_dict(settings_service=settings_service) lazy_load_dict = AllTypesDict() diff --git a/src/backend/langflow/interface/llms/base.py b/src/backend/langflow/interface/llms/base.py index f562b99ed..ffb7fa2f2 100644 --- a/src/backend/langflow/interface/llms/base.py +++ b/src/backend/langflow/interface/llms/base.py @@ -2,10 +2,10 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import llm_type_to_cls_dict -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.llms import LLMFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -34,12 +34,12 @@ class LLMCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ llm.__name__ for llm in self.type_to_loader_dict.values() - if llm.__name__ in settings_manager.settings.LLMS - or settings_manager.settings.DEV + if llm.__name__ in 
settings_service.settings.LLMS + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/langflow/interface/memories/base.py index 980d1640f..140c53666 100644 --- a/src/backend/langflow/interface/memories/base.py +++ b/src/backend/langflow/interface/memories/base.py @@ -2,11 +2,11 @@ from typing import ClassVar, Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import memory_type_to_cls_dict -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.memories import MemoryFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method from langflow.custom.customs import get_custom_nodes @@ -49,12 +49,12 @@ class MemoryCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ memory.__name__ for memory in self.type_to_loader_dict.values() - if memory.__name__ in settings_manager.settings.MEMORIES - or settings_manager.settings.DEV + if memory.__name__ in settings_service.settings.MEMORIES + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/output_parsers/base.py b/src/backend/langflow/interface/output_parsers/base.py index 72f861bbf..39269c2b4 100644 --- a/src/backend/langflow/interface/output_parsers/base.py +++ b/src/backend/langflow/interface/output_parsers/base.py @@ -4,10 +4,10 @@ from langchain import output_parsers from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.services.utils import get_settings_manager +from langflow.services.getters import 
get_settings_service from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -24,7 +24,7 @@ class OutputParserCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: - settings_manager = get_settings_manager() + settings_service = get_settings_service() self.type_dict = { output_parser_name: import_class( f"langchain.output_parsers.{output_parser_name}" @@ -35,8 +35,8 @@ class OutputParserCreator(LangChainTypeCreator): self.type_dict = { name: output_parser for name, output_parser in self.type_dict.items() - if name in settings_manager.settings.OUTPUT_PARSERS - or settings_manager.settings.DEV + if name in settings_service.settings.OUTPUT_PARSERS + or settings_service.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index 5aa41dfb2..29d3e8ba8 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -5,10 +5,10 @@ from langchain import prompts from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.prompts import PromptFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -21,7 +21,7 @@ class PromptCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: - settings_manager = get_settings_manager() + settings_service = get_settings_service() if self.type_dict is None: self.type_dict = { prompt_name: 
import_class(f"langchain.prompts.{prompt_name}") @@ -36,8 +36,8 @@ class PromptCreator(LangChainTypeCreator): self.type_dict = { name: prompt for name, prompt in self.type_dict.items() - if name in settings_manager.settings.PROMPTS - or settings_manager.settings.DEV + if name in settings_service.settings.PROMPTS + or settings_service.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/retrievers/base.py b/src/backend/langflow/interface/retrievers/base.py index 2f385d80c..44d3ad692 100644 --- a/src/backend/langflow/interface/retrievers/base.py +++ b/src/backend/langflow/interface/retrievers/base.py @@ -4,10 +4,10 @@ from langchain import retrievers from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.retrievers import RetrieverFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_method, build_template_from_class @@ -52,12 +52,12 @@ class RetrieverCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ retriever for retriever in self.type_to_loader_dict.keys() - if retriever in settings_manager.settings.RETRIEVERS - or settings_manager.settings.DEV + if retriever in settings_service.settings.RETRIEVERS + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 42cea0e98..63391204a 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -1,22 +1,9 @@ -from typing import Any, Dict, Tuple -from langflow.services.cache.utils import memoize_dict +from typing import Dict, Tuple from langflow.graph import Graph -from 
langflow.utils.logger import logger +from loguru import logger -@memoize_dict(maxsize=10) -def build_langchain_object_with_caching(data_graph): - """ - Build langchain object from data_graph. - """ - - logger.debug("Building langchain object") - graph = Graph.from_payload(data_graph) - return graph.build() - - -@memoize_dict(maxsize=10) -def build_sorted_vertices_with_caching(data_graph) -> Tuple[Any, Dict]: +def build_sorted_vertices(data_graph) -> Tuple[Graph, Dict]: """ Build langchain object from data_graph. """ @@ -29,7 +16,7 @@ def build_sorted_vertices_with_caching(data_graph) -> Tuple[Any, Dict]: vertex.build() if vertex.artifacts: artifacts.update(vertex.artifacts) - return graph.build(), artifacts + return graph, artifacts def build_langchain_object(data_graph): @@ -58,8 +45,12 @@ def get_memory_key(langchain_object): "chat_history": "history", "history": "chat_history", } - memory_key = langchain_object.memory.memory_key - return mem_key_dict.get(memory_key) + # Check if memory_key attribute exists + if hasattr(langchain_object.memory, "memory_key"): + memory_key = langchain_object.memory.memory_key + return mem_key_dict.get(memory_key) + else: + return None # or some other default value or action def update_memory_keys(langchain_object, possible_new_mem_key): diff --git a/src/backend/langflow/interface/text_splitters/base.py b/src/backend/langflow/interface/text_splitters/base.py index 87b778c4c..4f337817f 100644 --- a/src/backend/langflow/interface/text_splitters/base.py +++ b/src/backend/langflow/interface/text_splitters/base.py @@ -1,11 +1,11 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode from langflow.interface.custom_lists import textsplitter_type_to_cls_dict -from langflow.utils.logger 
import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -31,12 +31,12 @@ class TextSplitterCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ textsplitter.__name__ for textsplitter in self.type_to_loader_dict.values() - if textsplitter.__name__ in settings_manager.settings.TEXTSPLITTERS - or settings_manager.settings.DEV + if textsplitter.__name__ in settings_service.settings.TEXTSPLITTERS + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/toolkits/base.py b/src/backend/langflow/interface/toolkits/base.py index c13ffdbd9..73ca4852f 100644 --- a/src/backend/langflow/interface/toolkits/base.py +++ b/src/backend/langflow/interface/toolkits/base.py @@ -4,9 +4,9 @@ from langchain.agents import agent_toolkits from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class, import_module -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -30,7 +30,7 @@ class ToolkitCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: - settings_manager = get_settings_manager() + settings_service = get_settings_service() self.type_dict = { toolkit_name: import_class( f"langchain.agents.agent_toolkits.{toolkit_name}" @@ -38,7 +38,7 @@ class ToolkitCreator(LangChainTypeCreator): # if toolkit_name is not lower case it is a class for toolkit_name in agent_toolkits.__all__ if not toolkit_name.islower() - and toolkit_name in settings_manager.settings.TOOLKITS + and toolkit_name in settings_service.settings.TOOLKITS } return self.type_dict diff --git a/src/backend/langflow/interface/tools/base.py 
b/src/backend/langflow/interface/tools/base.py index 82d334512..4be0452e7 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -15,7 +15,7 @@ from langflow.interface.tools.constants import ( OTHER_TOOLS, ) from langflow.interface.tools.util import get_tool_params -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.field.base import TemplateField from langflow.template.template.base import Template @@ -68,7 +68,7 @@ class ToolCreator(LangChainTypeCreator): @property def type_to_loader_dict(self) -> Dict: - settings_manager = get_settings_manager() + settings_service = get_settings_service() if self.tools_dict is None: all_tools = {} @@ -82,8 +82,8 @@ class ToolCreator(LangChainTypeCreator): tool_name = tool_params.get("name") or tool if ( - tool_name in settings_manager.settings.TOOLS - or settings_manager.settings.DEV + tool_name in settings_service.settings.TOOLS + or settings_service.settings.DEV ): if tool_name == "JsonSpec": tool_params["path"] = tool_params.pop("dict_") # type: ignore diff --git a/src/backend/langflow/interface/tools/util.py b/src/backend/langflow/interface/tools/util.py index 48d9368d1..8e4f582c1 100644 --- a/src/backend/langflow/interface/tools/util.py +++ b/src/backend/langflow/interface/tools/util.py @@ -1,12 +1,13 @@ import ast import inspect +import textwrap from typing import Dict, Union from langchain.agents.tools import Tool def get_func_tool_params(func, **kwargs) -> Union[Dict, None]: - tree = ast.parse(inspect.getsource(func)) + tree = ast.parse(textwrap.dedent(inspect.getsource(func))) # Iterate over the statements in the abstract syntax tree for node in ast.walk(tree): @@ -57,7 +58,7 @@ def get_func_tool_params(func, **kwargs) -> Union[Dict, None]: def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]: - tree = ast.parse(inspect.getsource(cls)) + tree = 
ast.parse(textwrap.dedent(inspect.getsource(cls))) tool_params = {} diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 885e33694..6708c11c9 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -1,10 +1,11 @@ import ast import contextlib from typing import Any, List -from langflow.api.utils import merge_nested_dicts_with_renaming +from langflow.api.utils import get_new_key from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES +from langflow.interface.custom.utils import extract_inner_type from langflow.interface.document_loaders.base import documentloader_creator from langflow.interface.embeddings.base import embedding_creator from langflow.interface.importing.utils import get_function_custom @@ -29,7 +30,7 @@ from langflow.template.frontend_node.custom_components import ( from langflow.interface.retrievers.base import retriever_creator from langflow.interface.custom.directory_reader import DirectoryReader -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import get_base_classes import re @@ -84,6 +85,8 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union def process_type(field_type: str): + if field_type.startswith("list") or field_type.startswith("List"): + return extract_inner_type(field_type) return "prompt" if field_type == "Prompt" else field_type @@ -100,6 +103,7 @@ def add_new_custom_field( # if it is, update the value display_name = field_config.pop("display_name", field_name) field_type = field_config.pop("field_type", field_type) + field_contains_list = "list" in field_type.lower() field_type = process_type(field_type) field_value = field_config.pop("value", field_value) field_advanced = field_config.pop("advanced", False) @@ -110,7 +114,9 @@ def 
add_new_custom_field( # If options is a list, then it's a dropdown # If options is None, then it's a list of strings is_list = isinstance(field_config.get("options"), list) - field_config["is_list"] = is_list or field_config.get("is_list", False) + field_config["is_list"] = ( + is_list or field_config.get("is_list", False) or field_contains_list + ) if "name" in field_config: warnings.warn( @@ -172,7 +178,7 @@ def extract_type_from_optional(field_type): Returns: str: The extracted type, or an empty string if no type was found. """ - match = re.search(r"\[(.*?)\]", field_type) + match = re.search(r"\[(.*?)\]$", field_type) return match[1] if match else None @@ -190,17 +196,16 @@ def build_frontend_node(custom_component: CustomComponent): def update_attributes(frontend_node, template_config): """Update the display name and description of a frontend node""" - if "display_name" in template_config: - frontend_node["display_name"] = template_config["display_name"] - - if "description" in template_config: - frontend_node["description"] = template_config["description"] - - if "beta" in template_config: - frontend_node["beta"] = template_config["beta"] - - if "documentation" in template_config: - frontend_node["documentation"] = template_config["documentation"] + attributes = [ + "display_name", + "description", + "beta", + "documentation", + "output_types", + ] + for attribute in attributes: + if attribute in template_config: + frontend_node[attribute] = template_config[attribute] def build_field_config(custom_component: CustomComponent): @@ -285,31 +290,44 @@ def add_base_classes(frontend_node, return_types: List[str]): def build_langchain_template_custom_component(custom_component: CustomComponent): """Build a custom component template for the langchain""" - logger.debug("Building custom component template") - frontend_node = build_frontend_node(custom_component) + try: + logger.debug("Building custom component template") + frontend_node = 
build_frontend_node(custom_component) - if frontend_node is None: - return None - logger.debug("Built base frontend node") - template_config = custom_component.build_template_config + if frontend_node is None: + return None + logger.debug("Built base frontend node") + template_config = custom_component.build_template_config - update_attributes(frontend_node, template_config) - logger.debug("Updated attributes") - field_config = build_field_config(custom_component) - logger.debug("Built field config") - add_extra_fields( - frontend_node, field_config, custom_component.get_function_entrypoint_args - ) - logger.debug("Added extra fields") - frontend_node = add_code_field( - frontend_node, custom_component.code, field_config.get("code", {}) - ) - logger.debug("Added code field") - add_base_classes( - frontend_node, custom_component.get_function_entrypoint_return_type - ) - logger.debug("Added base classes") - return frontend_node + update_attributes(frontend_node, template_config) + logger.debug("Updated attributes") + field_config = build_field_config(custom_component) + logger.debug("Built field config") + entrypoint_args = custom_component.get_function_entrypoint_args + + add_extra_fields(frontend_node, field_config, entrypoint_args) + logger.debug("Added extra fields") + frontend_node = add_code_field( + frontend_node, custom_component.code, field_config.get("code", {}) + ) + logger.debug("Added code field") + add_base_classes( + frontend_node, custom_component.get_function_entrypoint_return_type + ) + logger.debug("Added base classes") + return frontend_node + except Exception as exc: + if isinstance(exc, HTTPException): + raise exc + raise HTTPException( + status_code=400, + detail={ + "error": ( + "Invalid type conversion. Please check your code and try again." 
+ ), + "traceback": traceback.format_exc(), + }, + ) from exc def load_files_from_path(path: str): @@ -338,7 +356,9 @@ def build_valid_menu(valid_components): valid_menu[menu_name] = {} for component in menu_item["components"]: - logger.debug(f"Building component: {component}") + logger.debug( + f"Building component: {component.get('name'), component.get('output_types')}" + ) try: component_name = component["name"] component_code = component["code"] @@ -415,6 +435,24 @@ def build_invalid_menu(invalid_components): return invalid_menu +def merge_nested_dicts_with_renaming(dict1, dict2): + for key, value in dict2.items(): + if ( + key in dict1 + and isinstance(value, dict) + and isinstance(dict1.get(key), dict) + ): + for sub_key, sub_value in value.items(): + if sub_key in dict1[key]: + new_key = get_new_key(dict1[key], sub_key) + dict1[key][new_key] = sub_value + else: + dict1[key][sub_key] = sub_value + else: + dict1[key] = value + return dict1 + + def build_langchain_custom_component_list_from_path(path: str): """Build a list of custom components for the langchain from a given path""" file_list = load_files_from_path(path) @@ -428,3 +466,51 @@ def build_langchain_custom_component_list_from_path(path: str): invalid_menu = build_invalid_menu(invalid_components) return merge_nested_dicts_with_renaming(valid_menu, invalid_menu) + + +def get_all_types_dict(settings_service): + native_components = build_langchain_types_dict() + # custom_components is a list of dicts + # need to merge all the keys into one dict + custom_components_from_file: dict[str, Any] = {} + if settings_service.settings.COMPONENTS_PATH: + logger.info( + f"Building custom components from {settings_service.settings.COMPONENTS_PATH}" + ) + + custom_component_dicts = [] + processed_paths = [] + for path in settings_service.settings.COMPONENTS_PATH: + if str(path) in processed_paths: + continue + custom_component_dict = build_langchain_custom_component_list_from_path( + str(path) + ) + 
custom_component_dicts.append(custom_component_dict) + processed_paths.append(str(path)) + + logger.info(f"Loading {len(custom_component_dicts)} category(ies)") + for custom_component_dict in custom_component_dicts: + # custom_component_dict is a dict of dicts + if not custom_component_dict: + continue + category = list(custom_component_dict.keys())[0] + logger.info( + f"Loading {len(custom_component_dict[category])} component(s) from category {category}" + ) + custom_components_from_file = merge_nested_dicts_with_renaming( + custom_components_from_file, custom_component_dict + ) + + return merge_nested_dicts_with_renaming( + native_components, custom_components_from_file + ) + + +def merge_nested_dicts(dict1, dict2): + for key, value in dict2.items(): + if isinstance(value, dict) and isinstance(dict1.get(key), dict): + dict1[key] = merge_nested_dicts(dict1[key], value) + else: + dict1[key] = value + return dict1 diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py index eb8cd60af..665143da2 100644 --- a/src/backend/langflow/interface/utilities/base.py +++ b/src/backend/langflow/interface/utilities/base.py @@ -5,10 +5,10 @@ from langchain import SQLDatabase, utilities from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.utilities import UtilitiesFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class @@ -27,7 +27,7 @@ class UtilityCreator(LangChainTypeCreator): from the langchain.chains module and filtering them according to the settings.utilities list. 
""" if self.type_dict is None: - settings_manager = get_settings_manager() + settings_service = get_settings_service() self.type_dict = { utility_name: import_class(f"langchain.utilities.{utility_name}") for utility_name in utilities.__all__ @@ -37,8 +37,8 @@ class UtilityCreator(LangChainTypeCreator): self.type_dict = { name: utility for name, utility in self.type_dict.items() - if name in settings_manager.settings.UTILITIES - or settings_manager.settings.DEV + if name in settings_service.settings.UTILITIES + or settings_service.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py index 1fddbf80f..7b78e75c5 100644 --- a/src/backend/langflow/interface/utils.py +++ b/src/backend/langflow/interface/utils.py @@ -8,9 +8,9 @@ import re import yaml from langchain.base_language import BaseLanguageModel from PIL.Image import Image -from langflow.utils.logger import logger +from loguru import logger from langflow.services.chat.config import ChatConfig -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service def load_file_into_dict(file_path: str) -> dict: @@ -64,11 +64,11 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]: def setup_llm_caching(): """Setup LLM caching.""" - settings_manager = get_settings_manager() + settings_service = get_settings_service() try: - set_langchain_cache(settings_manager.settings) + set_langchain_cache(settings_service.settings) except ImportError: - logger.warning(f"Could not import {settings_manager.settings.CACHE}. ") + logger.warning(f"Could not import {settings_service.settings.CACHE_TYPE}. ") except Exception as exc: logger.warning(f"Could not setup LLM caching. 
Error: {exc}") @@ -77,9 +77,16 @@ def set_langchain_cache(settings): import langchain from langflow.interface.importing.utils import import_class - cache_type = os.getenv("LANGFLOW_LANGCHAIN_CACHE") - cache_class = import_class(f"langchain.cache.{cache_type or settings.CACHE}") + if cache_type := os.getenv("LANGFLOW_LANGCHAIN_CACHE"): + try: + cache_class = import_class( + f"langchain.cache.{cache_type or settings.LANGCHAIN_CACHE}" + ) - logger.debug(f"Setting up LLM caching with {cache_class.__name__}") - langchain.llm_cache = cache_class() - logger.info(f"LLM caching setup with {cache_class.__name__}") + logger.debug(f"Setting up LLM caching with {cache_class.__name__}") + langchain.llm_cache = cache_class() + logger.info(f"LLM caching setup with {cache_class.__name__}") + except ImportError: + logger.warning(f"Could not import {cache_type}. ") + else: + logger.info("No LLM cache set.") diff --git a/src/backend/langflow/interface/vector_store/base.py b/src/backend/langflow/interface/vector_store/base.py index 4b8ca2b64..786031a6b 100644 --- a/src/backend/langflow/interface/vector_store/base.py +++ b/src/backend/langflow/interface/vector_store/base.py @@ -4,10 +4,10 @@ from langchain import vectorstores from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_method @@ -44,12 +44,12 @@ class VectorstoreCreator(LangChainTypeCreator): return None def to_list(self) -> List[str]: - settings_manager = get_settings_manager() + settings_service = get_settings_service() return [ vectorstore for vectorstore in self.type_to_loader_dict.keys() - if vectorstore in settings_manager.settings.VECTORSTORES 
- or settings_manager.settings.DEV + if vectorstore in settings_service.settings.VECTORSTORES + or settings_service.settings.DEV ] diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/langflow/interface/wrappers/base.py index c5fe14ad8..30625c264 100644 --- a/src/backend/langflow/interface/wrappers/base.py +++ b/src/backend/langflow/interface/wrappers/base.py @@ -3,7 +3,7 @@ from typing import ClassVar, Dict, List, Optional from langchain import requests, sql_database from langflow.interface.base import LangChainTypeCreator -from langflow.utils.logger import logger +from loguru import logger from langflow.utils.util import build_template_from_class, build_template_from_method diff --git a/src/backend/langflow/jcloud.yml b/src/backend/langflow/jcloud.yml deleted file mode 100644 index ffc1c9b8a..000000000 --- a/src/backend/langflow/jcloud.yml +++ /dev/null @@ -1,2 +0,0 @@ -instance: C4 -autoscale_min: 1 \ No newline at end of file diff --git a/src/backend/langflow/lcserve.py b/src/backend/langflow/lcserve.py deleted file mode 100644 index 87f69e014..000000000 --- a/src/backend/langflow/lcserve.py +++ /dev/null @@ -1,15 +0,0 @@ -# This file is used by lc-serve to load the mounted app and serve it. - -import os - -# Use the JCLOUD_WORKSPACE for db URL if it's provided by JCloud. 
-if "JCLOUD_WORKSPACE" in os.environ: - os.environ[ - "LANGFLOW_DATABASE_URL" - ] = f"sqlite:///{os.environ['JCLOUD_WORKSPACE']}/langflow.db" - -from langflow.main import setup_app -from langflow.utils.logger import configure - -configure(log_level="DEBUG") -app = setup_app() diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index b63caff24..9caa157d0 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -6,11 +6,14 @@ from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles from langflow.api import router -from langflow.routers import login, users, health + from langflow.interface.utils import setup_llm_caching -from langflow.services.database.utils import initialize_database -from langflow.services.manager import initialize_services +from langflow.services.utils import initialize_services +from langflow.services.plugins.langfuse import LangfuseInstance +from langflow.services.utils import ( + teardown_services, +) from langflow.utils.logger import configure @@ -31,15 +34,19 @@ def create_app(): allow_headers=["*"], ) - app.include_router(login.router) - app.include_router(users.router) - app.include_router(health.router) + @app.get("/health") + def health(): + return {"status": "ok"} app.include_router(router) app.on_event("startup")(initialize_services) - app.on_event("startup")(initialize_database) app.on_event("startup")(setup_llm_caching) + app.on_event("startup")(LangfuseInstance.update) + + app.on_event("shutdown")(teardown_services) + app.on_event("shutdown")(LangfuseInstance.teardown) + return app @@ -89,7 +96,7 @@ def setup_app( if __name__ == "__main__": import uvicorn - from langflow.utils.util import get_number_of_workers + from langflow.__main__ import get_number_of_workers configure() uvicorn.run( diff --git a/src/backend/langflow/processing/base.py b/src/backend/langflow/processing/base.py index 13ff6a385..e5816306c 100644 --- a/src/backend/langflow/processing/base.py 
+++ b/src/backend/langflow/processing/base.py @@ -1,11 +1,57 @@ -from typing import Union +from typing import List, Union, TYPE_CHECKING from langflow.api.v1.callback import ( AsyncStreamingLLMCallbackHandler, StreamingLLMCallbackHandler, ) from langflow.processing.process import fix_memory_inputs, format_actions -from langflow.utils.logger import logger +from loguru import logger from langchain.agents.agent import AgentExecutor +from langchain.callbacks.base import BaseCallbackHandler + +if TYPE_CHECKING: + from langfuse.callback import CallbackHandler # type: ignore + + +def setup_callbacks(sync, trace_id, **kwargs): + """Setup callbacks for langchain object""" + callbacks = [] + if sync: + callbacks.append(StreamingLLMCallbackHandler(**kwargs)) + else: + callbacks.append(AsyncStreamingLLMCallbackHandler(**kwargs)) + + if langfuse_callback := get_langfuse_callback(trace_id=trace_id): + logger.debug("Langfuse callback loaded") + callbacks.append(langfuse_callback) + return callbacks + + +def get_langfuse_callback(trace_id): + from langflow.services.plugins.langfuse import LangfuseInstance + from langfuse.callback import CreateTrace + + logger.debug("Initializing langfuse callback") + if langfuse := LangfuseInstance.get(): + logger.debug("Langfuse credentials found") + try: + trace = langfuse.trace(CreateTrace(id=trace_id)) + return trace.getNewHandler() + except Exception as exc: + logger.error(f"Error initializing langfuse callback: {exc}") + + return None + + +def flush_langfuse_callback_if_present( + callbacks: List[Union[BaseCallbackHandler, "CallbackHandler"]] +): + """ + If langfuse callback is present, run callback.langfuse.flush() + """ + for callback in callbacks: + if hasattr(callback, "langfuse"): + callback.langfuse.flush() + break async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwargs): @@ -27,13 +73,18 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa logger.error(f"Error fixing memory 
inputs: {exc}") try: - async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)] - output = await langchain_object.acall(inputs, callbacks=async_callbacks) + trace_id = kwargs.pop("session_id", None) + callbacks = setup_callbacks(sync=False, trace_id=trace_id, **kwargs) + output = await langchain_object.acall(inputs, callbacks=callbacks) except Exception as exc: # make the error message more informative logger.debug(f"Error: {str(exc)}") - sync_callbacks = [StreamingLLMCallbackHandler(**kwargs)] - output = langchain_object(inputs, callbacks=sync_callbacks) + trace_id = kwargs.pop("session_id", None) + callbacks = setup_callbacks(sync=True, trace_id=trace_id, **kwargs) + output = langchain_object(inputs, callbacks=callbacks) + + # if langfuse callback is present, run callback.langfuse.flush() + flush_langfuse_callback_if_present(callbacks) intermediate_steps = ( output.get("intermediate_steps", []) if isinstance(output, dict) else [] diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py index 396135e16..4c96e2fde 100644 --- a/src/backend/langflow/processing/process.py +++ b/src/backend/langflow/processing/process.py @@ -1,16 +1,20 @@ +import json from pathlib import Path from langchain.schema import AgentAction -import json from langflow.interface.run import ( - build_sorted_vertices_with_caching, + build_sorted_vertices, get_memory_key, update_memory_keys, ) -from langflow.utils.logger import logger +from langflow.services.getters import get_session_service +from loguru import logger from langflow.graph import Graph from langchain.chains.base import Chain from langchain.vectorstores.base import VectorStore from typing import Any, Dict, List, Optional, Tuple, Union +from langchain.schema import Document + +from pydantic import BaseModel def fix_memory_inputs(langchain_object): @@ -64,7 +68,7 @@ def get_result_and_thought(langchain_object: Any, inputs: dict): langchain_object.verbose = True if 
hasattr(langchain_object, "return_intermediate_steps"): - langchain_object.return_intermediate_steps = True + langchain_object.return_intermediate_steps = False fix_memory_inputs(langchain_object) @@ -92,26 +96,19 @@ def get_build_result(data_graph, session_id): # otherwise, build the graph and return the result if session_id: logger.debug(f"Loading LangChain object from session {session_id}") - result = build_sorted_vertices_with_caching.get_result_by_session_id(session_id) + result = build_sorted_vertices(data_graph=data_graph) if result is not None: logger.debug("Loaded LangChain object") return result logger.debug("Building langchain object") - return build_sorted_vertices_with_caching(data_graph) - - -def clear_caches_if_needed(clear_cache: bool): - if clear_cache: - build_sorted_vertices_with_caching.clear_cache() - logger.debug("Cleared cache") + return build_sorted_vertices(data_graph) def load_langchain_object( data_graph: Dict[str, Any], session_id: str ) -> Tuple[Union[Chain, VectorStore], Dict[str, Any], str]: langchain_object, artifacts = get_build_result(data_graph, session_id) - session_id = build_sorted_vertices_with_caching.hash logger.debug("Loaded LangChain object") if langchain_object is None: @@ -139,33 +136,47 @@ def generate_result(langchain_object: Union[Chain, VectorStore], inputs: dict): raise ValueError("Inputs must be provided for a Chain") logger.debug("Generating result and thought") result = get_result_and_thought(langchain_object, inputs) + logger.debug("Generated result and thought") elif isinstance(langchain_object, VectorStore): result = langchain_object.search(**inputs) + elif isinstance(langchain_object, Document): + result = langchain_object.dict() else: - raise ValueError( - f"Unknown langchain_object type: {type(langchain_object).__name__}" - ) + logger.warning(f"Unknown langchain_object type: {type(langchain_object)}") + result = langchain_object return result -def process_graph_cached( +class Result(BaseModel): + result: 
Any + session_id: str + + +async def process_graph_cached( data_graph: Dict[str, Any], inputs: Optional[dict] = None, clear_cache=False, session_id=None, -) -> Tuple[Any, str]: - clear_caches_if_needed(clear_cache) - # If session_id is provided, load the langchain_object from the session - # else build the graph and return the result and the new session_id - langchain_object, artifacts, session_id = load_langchain_object( - data_graph, session_id - ) +) -> Result: + session_service = get_session_service() + if clear_cache: + session_service.clear_session(session_id) + if session_id is None: + session_id = session_service.generate_key( + session_id=session_id, data_graph=data_graph + ) + # Load the graph using SessionService + graph, artifacts = session_service.load_session(session_id, data_graph) + built_object = graph.build() processed_inputs = process_inputs(inputs, artifacts) - result = generate_result(langchain_object, processed_inputs) + result = generate_result(built_object, processed_inputs) + # langchain_object is now updated with the new memory + # we need to update the cache with the updated langchain_object + session_service.update_session(session_id, (graph, artifacts)) - return result, session_id + return Result(result=result, session_id=session_id) def load_flow_from_json( diff --git a/src/backend/langflow/routers/health.py b/src/backend/langflow/routers/health.py deleted file mode 100644 index 244ef001d..000000000 --- a/src/backend/langflow/routers/health.py +++ /dev/null @@ -1,8 +0,0 @@ -from fastapi import APIRouter - -router = APIRouter() - - -@router.get("/health") -def get_health(): - return {"status": "OK"} diff --git a/src/backend/langflow/routers/users.py b/src/backend/langflow/routers/users.py deleted file mode 100644 index da738a5cd..000000000 --- a/src/backend/langflow/routers/users.py +++ /dev/null @@ -1,133 +0,0 @@ -from uuid import UUID - -from sqlalchemy import func -from sqlalchemy.exc import IntegrityError - -from sqlmodel import 
Session, select -from fastapi import APIRouter, Depends, HTTPException - -from langflow.services.utils import get_session -from langflow.auth.auth import get_current_active_user, get_password_hash -from langflow.database.models.user import ( - User, - UserAddModel, - UserListModel, - UserPatchModel, - UsersResponse, - update_user, -) - -router = APIRouter(tags=["Login"]) - - -@router.post("/user", response_model=UserListModel) -def add_user( - user: UserAddModel, - db: Session = Depends(get_session), -) -> User: - """ - Add a new user to the database. - """ - new_user = User(**user.dict()) - try: - new_user.password = get_password_hash(user.password) - - db.add(new_user) - db.commit() - db.refresh(new_user) - except IntegrityError as e: - db.rollback() - raise HTTPException(status_code=400, detail="User exists") from e - - return new_user - - -@router.get("/user", response_model=UserListModel) -def read_current_user(current_user: User = Depends(get_current_active_user)) -> User: - """ - Retrieve the current user's data. - """ - return current_user - - -@router.get("/users", response_model=UsersResponse) -def read_all_users( - skip: int = 0, - limit: int = 10, - _: Session = Depends(get_current_active_user), - db: Session = Depends(get_session), -) -> UsersResponse: - """ - Retrieve a list of users from the database with pagination. - """ - query = select(User).offset(skip).limit(limit) - users = db.execute(query).fetchall() - - count_query = select(func.count()).select_from(User) # type: ignore - total_count = db.execute(count_query).scalar() - - return UsersResponse( - total_count=total_count, # type: ignore - users=[UserListModel(**dict(user.User)) for user in users], - ) - - -@router.patch("/user/{user_id}", response_model=UserListModel) -def patch_user( - user_id: UUID, - user: UserPatchModel, - _: Session = Depends(get_current_active_user), - db: Session = Depends(get_session), -) -> User: - """ - Update an existing user's data. 
- """ - return update_user(user_id, user, db) - - -@router.delete("/user/{user_id}") -def delete_user( - user_id: UUID, - _: Session = Depends(get_current_active_user), - db: Session = Depends(get_session), -) -> dict: - """ - Delete a user from the database. - """ - user_db = db.query(User).filter(User.id == user_id).first() - if not user_db: - raise HTTPException(status_code=404, detail="User not found") - - db.delete(user_db) - db.commit() - - return {"detail": "User deleted"} - - -# TODO: REMOVE - Just for testing purposes -@router.post("/super_user", response_model=User) -def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev( - db: Session = Depends(get_session), -) -> User: - """ - Add a superuser for testing purposes. - (This should be removed in production) - """ - new_user = User( - username="superuser", - password="12345", - is_active=True, - is_superuser=True, - last_login_at=None, - ) - - try: - new_user.password = get_password_hash(new_user.password) - db.add(new_user) - db.commit() - db.refresh(new_user) - except IntegrityError as e: - db.rollback() - raise HTTPException(status_code=400, detail="User exists") from e - - return new_user diff --git a/src/backend/langflow/services/auth/__init__.py b/src/backend/langflow/services/auth/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/auth/factory.py b/src/backend/langflow/services/auth/factory.py new file mode 100644 index 000000000..b44019289 --- /dev/null +++ b/src/backend/langflow/services/auth/factory.py @@ -0,0 +1,12 @@ +from langflow.services.factory import ServiceFactory +from langflow.services.auth.service import AuthService + + +class AuthServiceFactory(ServiceFactory): + name = "auth_service" + + def __init__(self): + super().__init__(AuthService) + + def create(self, settings_service): + return AuthService(settings_service) diff --git a/src/backend/langflow/services/auth/service.py 
b/src/backend/langflow/services/auth/service.py new file mode 100644 index 000000000..5b0acf8c6 --- /dev/null +++ b/src/backend/langflow/services/auth/service.py @@ -0,0 +1,12 @@ +from langflow.services.base import Service +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from langflow.services.settings.manager import SettingsService + + +class AuthService(Service): + name = "auth_service" + + def __init__(self, settings_service: "SettingsService"): + self.settings_service = settings_service diff --git a/src/backend/langflow/services/auth/utils.py b/src/backend/langflow/services/auth/utils.py new file mode 100644 index 000000000..f88a1cd12 --- /dev/null +++ b/src/backend/langflow/services/auth/utils.py @@ -0,0 +1,296 @@ +from datetime import datetime, timedelta, timezone +from fastapi import Depends, HTTPException, Security, status +from fastapi.security import APIKeyHeader, APIKeyQuery, OAuth2PasswordBearer +from jose import JWTError, jwt +from typing import Annotated, Coroutine, Optional, Union +from uuid import UUID +from langflow.services.database.models.api_key.api_key import ApiKey +from langflow.services.database.models.api_key.crud import check_key +from langflow.services.database.models.user.user import User +from langflow.services.database.models.user.crud import ( + get_user_by_id, + get_user_by_username, + update_user_last_login_at, +) +from langflow.services.getters import get_session, get_settings_service +from sqlmodel import Session + +oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login") + +API_KEY_NAME = "x-api-key" + +api_key_query = APIKeyQuery( + name=API_KEY_NAME, scheme_name="API key query", auto_error=False +) +api_key_header = APIKeyHeader( + name=API_KEY_NAME, scheme_name="API key header", auto_error=False +) + + +# Source: https://github.com/mrtolkien/fastapi_simple_security/blob/master/fastapi_simple_security/security_api_key.py +async def api_key_security( + query_param: str = Security(api_key_query), + header_param: str = 
Security(api_key_header), + db: Session = Depends(get_session), +) -> Optional[User]: + settings_service = get_settings_service() + result: Optional[Union[ApiKey, User]] = None + if settings_service.auth_settings.AUTO_LOGIN: + # Get the first user + if not settings_service.auth_settings.SUPERUSER: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Missing first superuser credentials", + ) + + result = get_user_by_username(db, settings_service.auth_settings.SUPERUSER) + + elif not query_param and not header_param: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="An API key must be passed as query or header", + ) + + elif query_param: + result = check_key(db, query_param) + + else: + result = check_key(db, header_param) + + if not result: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Invalid or missing API key", + ) + if isinstance(result, ApiKey): + return result.user + elif isinstance(result, User): + return result + + +async def get_current_user( + token: Annotated[str, Depends(oauth2_login)], + db: Session = Depends(get_session), +) -> User: + settings_service = get_settings_service() + + credentials_exception = HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + + if isinstance(token, Coroutine): + token = await token + + if settings_service.auth_settings.SECRET_KEY is None: + raise credentials_exception + + try: + payload = jwt.decode( + token, + settings_service.auth_settings.SECRET_KEY, + algorithms=[settings_service.auth_settings.ALGORITHM], + ) + user_id: UUID = payload.get("sub") # type: ignore + token_type: str = payload.get("type") # type: ignore + if expires := payload.get("exp", None): + expires_datetime = datetime.fromtimestamp(expires, timezone.utc) + # TypeError: can't compare offset-naive and offset-aware datetimes + if datetime.now(timezone.utc) > expires_datetime: + raise 
credentials_exception + + if user_id is None or token_type: + raise credentials_exception + except JWTError as e: + raise credentials_exception from e + + user = get_user_by_id(db, user_id) # type: ignore + if user is None or not user.is_active: + raise credentials_exception + return user + + +def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]): + if not current_user.is_active: + raise HTTPException(status_code=400, detail="Inactive user") + return current_user + + +def get_current_active_superuser( + current_user: Annotated[User, Depends(get_current_user)] +) -> User: + if not current_user.is_active: + raise HTTPException(status_code=401, detail="Inactive user") + if not current_user.is_superuser: + raise HTTPException( + status_code=400, detail="The user doesn't have enough privileges" + ) + return current_user + + +def verify_password(plain_password, hashed_password): + settings_service = get_settings_service() + return settings_service.auth_settings.pwd_context.verify( + plain_password, hashed_password + ) + + +def get_password_hash(password): + settings_service = get_settings_service() + return settings_service.auth_settings.pwd_context.hash(password) + + +def create_token(data: dict, expires_delta: timedelta): + settings_service = get_settings_service() + + to_encode = data.copy() + expire = datetime.now(timezone.utc) + expires_delta + to_encode["exp"] = expire + + return jwt.encode( + to_encode, + settings_service.auth_settings.SECRET_KEY, + algorithm=settings_service.auth_settings.ALGORITHM, + ) + + +def create_super_user( + username: str, + password: str, + db: Session = Depends(get_session), +) -> User: + super_user = get_user_by_username(db, username) + + if not super_user: + super_user = User( + username=username, + password=get_password_hash(password), + is_superuser=True, + is_active=True, + last_login_at=None, + ) + + db.add(super_user) + db.commit() + db.refresh(super_user) + + return super_user + + +def 
create_user_longterm_token(db: Session = Depends(get_session)) -> dict: + settings_service = get_settings_service() + username = settings_service.auth_settings.SUPERUSER + password = settings_service.auth_settings.SUPERUSER_PASSWORD + if not username or not password: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Missing first superuser credentials", + ) + super_user = create_super_user(db=db, username=username, password=password) + + access_token_expires_longterm = timedelta(days=365) + access_token = create_token( + data={"sub": str(super_user.id)}, + expires_delta=access_token_expires_longterm, + ) + + # Update: last_login_at + update_user_last_login_at(super_user.id, db) + + return { + "access_token": access_token, + "refresh_token": None, + "token_type": "bearer", + } + + +def create_user_api_key(user_id: UUID) -> dict: + access_token = create_token( + data={"sub": str(user_id), "role": "api_key"}, + expires_delta=timedelta(days=365 * 2), + ) + + return {"api_key": access_token} + + +def get_user_id_from_token(token: str) -> UUID: + try: + user_id = jwt.get_unverified_claims(token)["sub"] + return UUID(user_id) + except (KeyError, JWTError, ValueError): + return UUID(int=0) + + +def create_user_tokens( + user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False +) -> dict: + settings_service = get_settings_service() + + access_token_expires = timedelta( + minutes=settings_service.auth_settings.ACCESS_TOKEN_EXPIRE_MINUTES + ) + access_token = create_token( + data={"sub": str(user_id)}, + expires_delta=access_token_expires, + ) + + refresh_token_expires = timedelta( + minutes=settings_service.auth_settings.REFRESH_TOKEN_EXPIRE_MINUTES + ) + refresh_token = create_token( + data={"sub": str(user_id), "type": "rf"}, + expires_delta=refresh_token_expires, + ) + + # Update: last_login_at + if update_last_login: + update_user_last_login_at(user_id, db) + + return { + "access_token": access_token, + "refresh_token": 
refresh_token, + "token_type": "bearer", + } + + +def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)): + settings_service = get_settings_service() + + try: + payload = jwt.decode( + refresh_token, + settings_service.auth_settings.SECRET_KEY, + algorithms=[settings_service.auth_settings.ALGORITHM], + ) + user_id: UUID = payload.get("sub") # type: ignore + token_type: str = payload.get("type") # type: ignore + + if user_id is None or token_type is None: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token" + ) + + return create_user_tokens(user_id, db) + + except JWTError as e: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid refresh token", + ) from e + + +def authenticate_user( + username: str, password: str, db: Session = Depends(get_session) +) -> Optional[User]: + user = get_user_by_username(db, username) + + if not user: + return None + + if not user.is_active: + if not user.last_login_at: + raise HTTPException(status_code=400, detail="Waiting for approval") + raise HTTPException(status_code=400, detail="Inactive user") + + return user if verify_password(password, user.password) else None diff --git a/src/backend/langflow/services/base.py b/src/backend/langflow/services/base.py index 6bca6c4e2..301771944 100644 --- a/src/backend/langflow/services/base.py +++ b/src/backend/langflow/services/base.py @@ -1,2 +1,12 @@ -class Service: +from abc import ABC + + +class Service(ABC): name: str + ready: bool = False + + def teardown(self): + pass + + def set_ready(self): + self.ready = True diff --git a/src/backend/langflow/services/cache/__init__.py b/src/backend/langflow/services/cache/__init__.py index 79e143807..3b122aa9e 100644 --- a/src/backend/langflow/services/cache/__init__.py +++ b/src/backend/langflow/services/cache/__init__.py @@ -1,10 +1,8 @@ from . 
import factory, manager -from langflow.services.cache.manager import cache_manager -from langflow.services.cache.flow import InMemoryCache +from langflow.services.cache.manager import InMemoryCache __all__ = [ - "cache_manager", "factory", "manager", "InMemoryCache", diff --git a/src/backend/langflow/services/cache/base.py b/src/backend/langflow/services/cache/base.py index 88cb3a1da..4eee6639e 100644 --- a/src/backend/langflow/services/cache/base.py +++ b/src/backend/langflow/services/cache/base.py @@ -1,11 +1,13 @@ import abc -class BaseCache(abc.ABC): +class BaseCacheService(abc.ABC): """ Abstract base class for a cache. """ + name = "cache_service" + @abc.abstractmethod def get(self, key): """ @@ -28,6 +30,16 @@ class BaseCache(abc.ABC): value: The value to cache. """ + @abc.abstractmethod + def upsert(self, key, value): + """ + Add an item to the cache if it doesn't exist, or update it if it does. + + Args: + key: The key of the item. + value: The value to cache. + """ + @abc.abstractmethod def delete(self, key): """ diff --git a/src/backend/langflow/services/cache/factory.py b/src/backend/langflow/services/cache/factory.py index 77f8d58d1..f00ab239f 100644 --- a/src/backend/langflow/services/cache/factory.py +++ b/src/backend/langflow/services/cache/factory.py @@ -1,11 +1,35 @@ -from langflow.services.cache.manager import CacheManager +from langflow.services.cache.manager import InMemoryCache, RedisCache, BaseCacheService from langflow.services.factory import ServiceFactory +from langflow.utils.logger import logger +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from langflow.services.settings.manager import SettingsService -class CacheManagerFactory(ServiceFactory): +class CacheServiceFactory(ServiceFactory): def __init__(self): - super().__init__(CacheManager) + super().__init__(BaseCacheService) - def create(self, settings_service): - # Here you would have logic to create and configure a CacheManager - return CacheManager() + def create(self, 
settings_service: "SettingsService"): + # Here you would have logic to create and configure a CacheService + # based on the settings_service + + if settings_service.settings.CACHE_TYPE == "redis": + logger.debug("Creating Redis cache") + redis_cache = RedisCache( + host=settings_service.settings.REDIS_HOST, + port=settings_service.settings.REDIS_PORT, + db=settings_service.settings.REDIS_DB, + expiration_time=settings_service.settings.REDIS_CACHE_EXPIRE, + ) + if redis_cache.is_connected(): + logger.debug("Redis cache is connected") + return redis_cache + logger.warning( + "Redis cache is not connected, falling back to in-memory cache" + ) + return InMemoryCache() + + elif settings_service.settings.CACHE_TYPE == "memory": + return InMemoryCache() diff --git a/src/backend/langflow/services/cache/flow.py b/src/backend/langflow/services/cache/flow.py deleted file mode 100644 index 0c10c51e1..000000000 --- a/src/backend/langflow/services/cache/flow.py +++ /dev/null @@ -1,146 +0,0 @@ -import threading -import time -from collections import OrderedDict - -from langflow.services.cache.base import BaseCache - - -class InMemoryCache(BaseCache): - """ - A simple in-memory cache using an OrderedDict. - - This cache supports setting a maximum size and expiration time for cached items. - When the cache is full, it uses a Least Recently Used (LRU) eviction policy. - Thread-safe using a threading Lock. - - Attributes: - max_size (int, optional): Maximum number of items to store in the cache. - expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. - - Example: - - cache = InMemoryCache(max_size=3, expiration_time=5) - - # setting cache values - cache.set("a", 1) - cache.set("b", 2) - cache["c"] = 3 - - # getting cache values - a = cache.get("a") - b = cache["b"] - """ - - def __init__(self, max_size=None, expiration_time=60 * 60): - """ - Initialize a new InMemoryCache instance. 
- - Args: - max_size (int, optional): Maximum number of items to store in the cache. - expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. - """ - self._cache = OrderedDict() - self._lock = threading.Lock() - self.max_size = max_size - self.expiration_time = expiration_time - - def get(self, key): - """ - Retrieve an item from the cache. - - Args: - key: The key of the item to retrieve. - - Returns: - The value associated with the key, or None if the key is not found or the item has expired. - """ - with self._lock: - if key in self._cache: - item = self._cache.pop(key) - if ( - self.expiration_time is None - or time.time() - item["time"] < self.expiration_time - ): - # Move the key to the end to make it recently used - self._cache[key] = item - return item["value"] - else: - self.delete(key) - return None - - def set(self, key, value): - """ - Add an item to the cache. - - If the cache is full, the least recently used item is evicted. - - Args: - key: The key of the item. - value: The value to cache. - """ - with self._lock: - if key in self._cache: - # Remove existing key before re-inserting to update order - self.delete(key) - elif self.max_size and len(self._cache) >= self.max_size: - # Remove least recently used item - self._cache.popitem(last=False) - self._cache[key] = {"value": value, "time": time.time()} - - def get_or_set(self, key, value): - """ - Retrieve an item from the cache. If the item does not exist, set it with the provided value. - - Args: - key: The key of the item. - value: The value to cache if the item doesn't exist. - - Returns: - The cached value associated with the key. - """ - with self._lock: - if key in self._cache: - return self.get(key) - self.set(key, value) - return value - - def delete(self, key): - """ - Remove an item from the cache. - - Args: - key: The key of the item to remove. 
- """ - # with self._lock: - self._cache.pop(key, None) - - def clear(self): - """ - Clear all items from the cache. - """ - with self._lock: - self._cache.clear() - - def __contains__(self, key): - """Check if the key is in the cache.""" - return key in self._cache - - def __getitem__(self, key): - """Retrieve an item from the cache using the square bracket notation.""" - return self.get(key) - - def __setitem__(self, key, value): - """Add an item to the cache using the square bracket notation.""" - self.set(key, value) - - def __delitem__(self, key): - """Remove an item from the cache using the square bracket notation.""" - self.delete(key) - - def __len__(self): - """Return the number of items in the cache.""" - return len(self._cache) - - def __repr__(self): - """Return a string representation of the InMemoryCache instance.""" - return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" diff --git a/src/backend/langflow/services/cache/manager.py b/src/backend/langflow/services/cache/manager.py index ce9a338ef..da76a2b5c 100644 --- a/src/backend/langflow/services/cache/manager.py +++ b/src/backend/langflow/services/cache/manager.py @@ -1,153 +1,336 @@ -from contextlib import contextmanager -from typing import Any, Awaitable, Callable, List, Optional +import threading +import time +from collections import OrderedDict from langflow.services.base import Service -import pandas as pd -from PIL import Image +from langflow.services.cache.base import BaseCacheService + +import pickle + +from loguru import logger -class Subject: - """Base class for implementing the observer pattern.""" +class InMemoryCache(BaseCacheService, Service): - def __init__(self): - self.observers: List[Callable[[], None]] = [] + """ + A simple in-memory cache using an OrderedDict. 
- def attach(self, observer: Callable[[], None]): - """Attach an observer to the subject.""" - self.observers.append(observer) + This cache supports setting a maximum size and expiration time for cached items. + When the cache is full, it uses a Least Recently Used (LRU) eviction policy. + Thread-safe using a threading Lock. - def detach(self, observer: Callable[[], None]): - """Detach an observer from the subject.""" - self.observers.remove(observer) + Attributes: + max_size (int, optional): Maximum number of items to store in the cache. + expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. - def notify(self): - """Notify all observers about an event.""" - for observer in self.observers: - if observer is None: - continue - observer() + Example: + cache = InMemoryCache(max_size=3, expiration_time=5) -class AsyncSubject: - """Base class for implementing the async observer pattern.""" + # setting cache values + cache.set("a", 1) + cache.set("b", 2) + cache["c"] = 3 - def __init__(self): - self.observers: List[Callable[[], Awaitable]] = [] + # getting cache values + a = cache.get("a") + b = cache["b"] + """ - def attach(self, observer: Callable[[], Awaitable]): - """Attach an observer to the subject.""" - self.observers.append(observer) - - def detach(self, observer: Callable[[], Awaitable]): - """Detach an observer from the subject.""" - self.observers.remove(observer) - - async def notify(self): - """Notify all observers about an event.""" - for observer in self.observers: - if observer is None: - continue - await observer() - - -class CacheManager(Subject, Service): - """Manages cache for different clients and notifies observers on changes.""" - - name = "cache_manager" - - def __init__(self): - super().__init__() - self._cache = {} - self.current_client_id = None - self.current_cache = {} - - @contextmanager - def set_client_id(self, client_id: str): + def __init__(self, max_size=None, expiration_time=60 * 60): 
""" - Context manager to set the current client_id and associated cache. + Initialize a new InMemoryCache instance. Args: - client_id (str): The client identifier. + max_size (int, optional): Maximum number of items to store in the cache. + expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. + """ + self._cache = OrderedDict() + self._lock = threading.RLock() + self.max_size = max_size + self.expiration_time = expiration_time + + def get(self, key): + """ + Retrieve an item from the cache. + + Args: + key: The key of the item to retrieve. + + Returns: + The value associated with the key, or None if the key is not found or the item has expired. + """ + with self._lock: + return self._get_without_lock(key) + + def _get_without_lock(self, key): + """ + Retrieve an item from the cache without acquiring the lock. + """ + if item := self._cache.get(key): + if ( + self.expiration_time is None + or time.time() - item["time"] < self.expiration_time + ): + # Move the key to the end to make it recently used + self._cache.move_to_end(key) + # Check if the value is pickled + if isinstance(item["value"], bytes): + value = pickle.loads(item["value"]) + else: + value = item["value"] + return value + else: + self.delete(key) + return None + + def set(self, key, value, pickle=False): + """ + Add an item to the cache. + + If the cache is full, the least recently used item is evicted. + + Args: + key: The key of the item. + value: The value to cache. + """ + with self._lock: + if key in self._cache: + # Remove existing key before re-inserting to update order + self.delete(key) + elif self.max_size and len(self._cache) >= self.max_size: + # Remove least recently used item + self._cache.popitem(last=False) + # pickle locally to mimic Redis + if pickle: + value = pickle.dumps(value) + + self._cache[key] = {"value": value, "time": time.time()} + + def upsert(self, key, value): + """ + Inserts or updates a value in the cache. 
+ If the existing value and the new value are both dictionaries, they are merged. + + Args: + key: The key of the item. + value: The value to insert or update. + """ + with self._lock: + existing_value = self._get_without_lock(key) + if ( + existing_value is not None + and isinstance(existing_value, dict) + and isinstance(value, dict) + ): + existing_value.update(value) + value = existing_value + + self.set(key, value) + + def get_or_set(self, key, value): + """ + Retrieve an item from the cache. If the item does not exist, + set it with the provided value. + + Args: + key: The key of the item. + value: The value to cache if the item doesn't exist. + + Returns: + The cached value associated with the key. + """ + with self._lock: + if key in self._cache: + return self.get(key) + self.set(key, value) + return value + + def delete(self, key): + """ + Remove an item from the cache. + + Args: + key: The key of the item to remove. + """ + with self._lock: + self._cache.pop(key, None) + + def clear(self): + """ + Clear all items from the cache. + """ + with self._lock: + self._cache.clear() + + def __contains__(self, key): + """Check if the key is in the cache.""" + return key in self._cache + + def __getitem__(self, key): + """Retrieve an item from the cache using the square bracket notation.""" + return self.get(key) + + def __setitem__(self, key, value): + """Add an item to the cache using the square bracket notation.""" + self.set(key, value) + + def __delitem__(self, key): + """Remove an item from the cache using the square bracket notation.""" + self.delete(key) + + def __len__(self): + """Return the number of items in the cache.""" + return len(self._cache) + + def __repr__(self): + """Return a string representation of the InMemoryCache instance.""" + return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})" + + +class RedisCache(BaseCacheService, Service): + """ + A Redis-based cache implementation. 
+ + This cache supports setting an expiration time for cached items. + + Attributes: + expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour. + + Example: + + cache = RedisCache(expiration_time=5) + + # setting cache values + cache.set("a", 1) + cache.set("b", 2) + cache["c"] = 3 + + # getting cache values + a = cache.get("a") + b = cache["b"] + """ + + def __init__(self, host="localhost", port=6379, db=0, expiration_time=60 * 60): + """ + Initialize a new RedisCache instance. + + Args: + host (str, optional): Redis host. + port (int, optional): Redis port. + db (int, optional): Redis DB. + expiration_time (int, optional): Time in seconds after which a + ached item expires. Default is 1 hour. """ - previous_client_id = self.current_client_id - self.current_client_id = client_id - self.current_cache = self._cache.setdefault(client_id, {}) try: - yield - finally: - self.current_client_id = previous_client_id - self.current_cache = self._cache.get(self.current_client_id, {}) + import redis + except ImportError as exc: + raise ImportError( + "RedisCache requires the redis-py package." + " Please install Langflow with the deploy extra: pip install langflow[deploy]" + ) from exc + logger.warning( + "RedisCache is an experimental feature and may not work as expected." + " Please report any issues to our GitHub repository." + ) + self._client = redis.StrictRedis(host=host, port=port, db=db) + self.expiration_time = expiration_time - def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = None): + # check connection + def is_connected(self): """ - Add an object to the current client's cache. + Check if the Redis client is connected. + """ + import redis + + try: + self._client.ping() + return True + except redis.exceptions.ConnectionError: + return False + + def get(self, key): + """ + Retrieve an item from the cache. Args: - name (str): The cache key. - obj (Any): The object to cache. 
- obj_type (str): The type of the object. - """ - object_extensions = { - "image": "png", - "pandas": "csv", - } - if obj_type in object_extensions: - _extension = object_extensions[obj_type] - else: - _extension = type(obj).__name__.lower() - self.current_cache[name] = { - "obj": obj, - "type": obj_type, - "extension": extension or _extension, - } - self.notify() - - def add_pandas(self, name: str, obj: Any): - """ - Add a pandas DataFrame or Series to the current client's cache. - - Args: - name (str): The cache key. - obj (Any): The pandas DataFrame or Series object. - """ - if isinstance(obj, (pd.DataFrame, pd.Series)): - self.add(name, obj.to_csv(), "pandas", extension="csv") - else: - raise ValueError("Object is not a pandas DataFrame or Series") - - def add_image(self, name: str, obj: Any, extension: str = "png"): - """ - Add a PIL Image to the current client's cache. - - Args: - name (str): The cache key. - obj (Any): The PIL Image object. - """ - if isinstance(obj, Image.Image): - self.add(name, obj, "image", extension=extension) - else: - raise ValueError("Object is not a PIL Image") - - def get(self, name: str): - """ - Get an object from the current client's cache. - - Args: - name (str): The cache key. + key: The key of the item to retrieve. Returns: - The cached object associated with the given cache key. + The value associated with the key, or None if the key is not found. """ - return self.current_cache[name] + value = self._client.get(key) + return pickle.loads(value) if value else None - def get_last(self): + def set(self, key, value): """ - Get the last added item in the current client's cache. + Add an item to the cache. - Returns: - The last added item in the cache. + Args: + key: The key of the item. + value: The value to cache. 
""" - return list(self.current_cache.values())[-1] + try: + if pickled := pickle.dumps(value): + result = self._client.setex(key, self.expiration_time, pickled) + if not result: + raise ValueError("RedisCache could not set the value.") + except TypeError as exc: + raise TypeError( + "RedisCache only accepts values that can be pickled. " + ) from exc + def upsert(self, key, value): + """ + Inserts or updates a value in the cache. + If the existing value and the new value are both dictionaries, they are merged. -cache_manager = CacheManager() + Args: + key: The key of the item. + value: The value to insert or update. + """ + existing_value = self.get(key) + if ( + existing_value is not None + and isinstance(existing_value, dict) + and isinstance(value, dict) + ): + existing_value.update(value) + value = existing_value + + self.set(key, value) + + def delete(self, key): + """ + Remove an item from the cache. + + Args: + key: The key of the item to remove. + """ + self._client.delete(key) + + def clear(self): + """ + Clear all items from the cache. 
+ """ + self._client.flushdb() + + def __contains__(self, key): + """Check if the key is in the cache.""" + return False if key is None else self._client.exists(key) + + def __getitem__(self, key): + """Retrieve an item from the cache using the square bracket notation.""" + return self.get(key) + + def __setitem__(self, key, value): + """Add an item to the cache using the square bracket notation.""" + self.set(key, value) + + def __delitem__(self, key): + """Remove an item from the cache using the square bracket notation.""" + self.delete(key) + + def __repr__(self): + """Return a string representation of the RedisCache instance.""" + return f"RedisCache(expiration_time={self.expiration_time})" diff --git a/src/backend/langflow/services/cache/utils.py b/src/backend/langflow/services/cache/utils.py index 2333eb5f4..bd6b4fb0a 100644 --- a/src/backend/langflow/services/cache/utils.py +++ b/src/backend/langflow/services/cache/utils.py @@ -2,13 +2,18 @@ import base64 import contextlib import functools import hashlib -import json import os import tempfile from collections import OrderedDict from pathlib import Path -from typing import Any, Dict +from typing import TYPE_CHECKING, Any, Dict from appdirs import user_cache_dir +from fastapi import UploadFile +from langflow.api.v1.schemas import BuildStatus +from langflow.services.database.models.base import orjson_dumps + +if TYPE_CHECKING: + pass CACHE: Dict[str, Any] = {} @@ -90,7 +95,8 @@ def clear_old_cache_files(max_cache_size: int = 3): def compute_dict_hash(graph_data): graph_data = filter_json(graph_data) - cleaned_graph_json = json.dumps(graph_data, sort_keys=True) + cleaned_graph_json = orjson_dumps(graph_data, sort_keys=True) + return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest() @@ -151,7 +157,7 @@ def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> @create_cache_folder -def save_uploaded_file(file, folder_name): +def save_uploaded_file(file: UploadFile, folder_name): 
""" Save an uploaded file to the specified folder with a hash of its content as the file name. @@ -164,6 +170,12 @@ def save_uploaded_file(file, folder_name): """ cache_path = Path(CACHE_DIR) folder_path = cache_path / folder_name + filename = file.filename + if isinstance(filename, str) or isinstance(filename, Path): + file_extension = Path(filename).suffix + else: + file_extension = "" + file_object = file.file # Create the folder if it doesn't exist if not folder_path.exists(): @@ -172,22 +184,30 @@ def save_uploaded_file(file, folder_name): # Create a hash of the file content sha256_hash = hashlib.sha256() # Reset the file cursor to the beginning of the file - file.seek(0) + file_object.seek(0) # Iterate over the uploaded file in small chunks to conserve memory - while chunk := file.read(8192): # Read 8KB at a time (adjust as needed) + while chunk := file_object.read(8192): # Read 8KB at a time (adjust as needed) sha256_hash.update(chunk) # Use the hex digest of the hash as the file name hex_dig = sha256_hash.hexdigest() - file_name = hex_dig + file_name = f"{hex_dig}{file_extension}" # Reset the file cursor to the beginning of the file - file.seek(0) + file_object.seek(0) # Save the file with the hash as its name file_path = folder_path / file_name with open(file_path, "wb") as new_file: - while chunk := file.read(8192): + while chunk := file_object.read(8192): new_file.write(chunk) return file_path + + +def update_build_status(cache_service, flow_id: str, status: BuildStatus): + cached_flow = cache_service[flow_id] + if cached_flow is None: + raise ValueError(f"Flow {flow_id} not found in cache") + cached_flow["status"] = status + cache_service[flow_id] = cached_flow diff --git a/src/backend/langflow/services/chat/cache.py b/src/backend/langflow/services/chat/cache.py new file mode 100644 index 000000000..f6247b540 --- /dev/null +++ b/src/backend/langflow/services/chat/cache.py @@ -0,0 +1,153 @@ +from contextlib import contextmanager +from typing import Any, 
Awaitable, Callable, List, Optional +from langflow.services.base import Service + +import pandas as pd +from PIL import Image + + +class Subject: + """Base class for implementing the observer pattern.""" + + def __init__(self): + self.observers: List[Callable[[], None]] = [] + + def attach(self, observer: Callable[[], None]): + """Attach an observer to the subject.""" + self.observers.append(observer) + + def detach(self, observer: Callable[[], None]): + """Detach an observer from the subject.""" + self.observers.remove(observer) + + def notify(self): + """Notify all observers about an event.""" + for observer in self.observers: + if observer is None: + continue + observer() + + +class AsyncSubject: + """Base class for implementing the async observer pattern.""" + + def __init__(self): + self.observers: List[Callable[[], Awaitable]] = [] + + def attach(self, observer: Callable[[], Awaitable]): + """Attach an observer to the subject.""" + self.observers.append(observer) + + def detach(self, observer: Callable[[], Awaitable]): + """Detach an observer from the subject.""" + self.observers.remove(observer) + + async def notify(self): + """Notify all observers about an event.""" + for observer in self.observers: + if observer is None: + continue + await observer() + + +class CacheService(Subject, Service): + """Manages cache for different clients and notifies observers on changes.""" + + name = "cache_service" + + def __init__(self): + super().__init__() + self._cache = {} + self.current_client_id = None + self.current_cache = {} + + @contextmanager + def set_client_id(self, client_id: str): + """ + Context manager to set the current client_id and associated cache. + + Args: + client_id (str): The client identifier. 
+ """ + previous_client_id = self.current_client_id + self.current_client_id = client_id + self.current_cache = self._cache.setdefault(client_id, {}) + try: + yield + finally: + self.current_client_id = previous_client_id + self.current_cache = self._cache.get(self.current_client_id, {}) + + def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = None): + """ + Add an object to the current client's cache. + + Args: + name (str): The cache key. + obj (Any): The object to cache. + obj_type (str): The type of the object. + """ + object_extensions = { + "image": "png", + "pandas": "csv", + } + if obj_type in object_extensions: + _extension = object_extensions[obj_type] + else: + _extension = type(obj).__name__.lower() + self.current_cache[name] = { + "obj": obj, + "type": obj_type, + "extension": extension or _extension, + } + self.notify() + + def add_pandas(self, name: str, obj: Any): + """ + Add a pandas DataFrame or Series to the current client's cache. + + Args: + name (str): The cache key. + obj (Any): The pandas DataFrame or Series object. + """ + if isinstance(obj, (pd.DataFrame, pd.Series)): + self.add(name, obj.to_csv(), "pandas", extension="csv") + else: + raise ValueError("Object is not a pandas DataFrame or Series") + + def add_image(self, name: str, obj: Any, extension: str = "png"): + """ + Add a PIL Image to the current client's cache. + + Args: + name (str): The cache key. + obj (Any): The PIL Image object. + """ + if isinstance(obj, Image.Image): + self.add(name, obj, "image", extension=extension) + else: + raise ValueError("Object is not a PIL Image") + + def get(self, name: str): + """ + Get an object from the current client's cache. + + Args: + name (str): The cache key. + + Returns: + The cached object associated with the given cache key. + """ + return self.current_cache[name] + + def get_last(self): + """ + Get the last added item in the current client's cache. + + Returns: + The last added item in the cache. 
+ """ + return list(self.current_cache.values())[-1] + + +cache_service = CacheService() diff --git a/src/backend/langflow/services/chat/factory.py b/src/backend/langflow/services/chat/factory.py index 03597ed11..54af7fcca 100644 --- a/src/backend/langflow/services/chat/factory.py +++ b/src/backend/langflow/services/chat/factory.py @@ -1,11 +1,11 @@ -from langflow.services.chat.manager import ChatManager +from langflow.services.chat.manager import ChatService from langflow.services.factory import ServiceFactory -class ChatManagerFactory(ServiceFactory): +class ChatServiceFactory(ServiceFactory): def __init__(self): - super().__init__(ChatManager) + super().__init__(ChatService) - def create(self, settings_service): - # Here you would have logic to create and configure a ChatManager - return ChatManager() + def create(self): + # Here you would have logic to create and configure a ChatService + return ChatService() diff --git a/src/backend/langflow/services/chat/manager.py b/src/backend/langflow/services/chat/manager.py index a49f48273..3fe937ce8 100644 --- a/src/backend/langflow/services/chat/manager.py +++ b/src/backend/langflow/services/chat/manager.py @@ -1,20 +1,19 @@ from collections import defaultdict +import uuid from fastapi import WebSocket, status from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse -from langflow.services.base import Service -from langflow.services import service_manager -from langflow.services.cache.manager import Subject -from langflow.services.chat.utils import process_graph from langflow.interface.utils import pil_to_base64 -from langflow.services.schema import ServiceType -from langflow.utils.logger import logger - +from langflow.services.base import Service +from langflow.services.chat.cache import Subject +from langflow.services.chat.utils import process_graph +from loguru import logger +from .cache import cache_service import asyncio -import json from typing import Any, Dict, List -from 
langflow.services.cache.flow import InMemoryCache +from langflow.services import service_manager, ServiceType +import orjson class ChatHistory(Subject): @@ -44,19 +43,20 @@ class ChatHistory(Subject): self.history[client_id] = [] -class ChatManager(Service): - name = "chat_manager" +class ChatService(Service): + name = "chat_service" def __init__(self): self.active_connections: Dict[str, WebSocket] = {} + self.connection_ids: Dict[str, str] = {} self.chat_history = ChatHistory() - self.cache_manager = service_manager.get(ServiceType.CACHE_MANAGER) - self.cache_manager.attach(self.update) - self.in_memory_cache = InMemoryCache() + self.chat_cache = cache_service + self.chat_cache.attach(self.update) + self.cache_service = service_manager.get(ServiceType.CACHE_SERVICE) def on_chat_history_update(self): """Send the last chat message to the client.""" - client_id = self.cache_manager.current_client_id + client_id = self.chat_cache.current_client_id if client_id in self.active_connections: chat_response = self.chat_history.get_history( client_id, filter_messages=False @@ -77,8 +77,8 @@ class ChatManager(Service): asyncio.run_coroutine_threadsafe(coroutine, loop) def update(self): - if self.cache_manager.current_client_id in self.active_connections: - self.last_cached_object_dict = self.cache_manager.get_last() + if self.chat_cache.current_client_id in self.active_connections: + self.last_cached_object_dict = self.chat_cache.get_last() # Add a new ChatResponse with the data chat_response = FileResponse( message=None, @@ -88,15 +88,18 @@ class ChatManager(Service): ) self.chat_history.add_message( - self.cache_manager.current_client_id, chat_response + self.chat_cache.current_client_id, chat_response ) async def connect(self, client_id: str, websocket: WebSocket): - await websocket.accept() self.active_connections[client_id] = websocket + # This is to avoid having multiple clients with the same id + #! 
Temporary solution + self.connection_ids[client_id] = f"{client_id}-{uuid.uuid4()}" def disconnect(self, client_id: str): self.active_connections.pop(client_id, None) + self.connection_ids.pop(client_id, None) async def send_message(self, client_id: str, message: str): websocket = self.active_connections[client_id] @@ -138,7 +141,9 @@ class ChatManager(Service): langchain_object=langchain_object, chat_inputs=chat_inputs, websocket=self.active_connections[client_id], + session_id=self.connection_ids[client_id], ) + self.set_cache(client_id, langchain_object) except Exception as e: # Log stack trace logger.exception(e) @@ -174,9 +179,15 @@ class ChatManager(Service): """ Set the cache for a client. """ + # client_id is the flow id but that already exists in the cache + # so we need to change it to something else - self.in_memory_cache.set(client_id, langchain_object) - return client_id in self.in_memory_cache + result_dict = { + "result": langchain_object, + "type": type(langchain_object), + } + self.cache_service.upsert(client_id, result_dict) + return client_id in self.cache_service async def handle_websocket(self, client_id: str, websocket: WebSocket): await self.connect(client_id, websocket) @@ -190,17 +201,23 @@ class ChatManager(Service): while True: json_payload = await websocket.receive_json() try: - payload = json.loads(json_payload) - except TypeError: + payload = orjson.loads(json_payload) + except Exception: payload = json_payload if "clear_history" in payload: self.chat_history.history[client_id] = [] continue - with self.cache_manager.set_client_id(client_id): - langchain_object = self.in_memory_cache.get(client_id) - await self.process_message(client_id, payload, langchain_object) + with self.chat_cache.set_client_id(client_id): + if langchain_object := self.cache_service.get(client_id).get( + "result" + ): + await self.process_message(client_id, payload, langchain_object) + else: + raise RuntimeError( + f"Could not find a LangChain object for 
client_id {client_id}" + ) except Exception as exc: # Handle any exceptions that might occur logger.error(f"Error handling websocket: {exc}") diff --git a/src/backend/langflow/services/chat/utils.py b/src/backend/langflow/services/chat/utils.py index 17c976eb9..a332e381e 100644 --- a/src/backend/langflow/services/chat/utils.py +++ b/src/backend/langflow/services/chat/utils.py @@ -2,13 +2,14 @@ from fastapi import WebSocket from langflow.api.v1.schemas import ChatMessage from langflow.processing.base import get_result_and_steps from langflow.interface.utils import try_setting_streaming_options -from langflow.utils.logger import logger +from loguru import logger async def process_graph( langchain_object, chat_inputs: ChatMessage, websocket: WebSocket, + session_id: str, ): langchain_object = try_setting_streaming_options(langchain_object, websocket) logger.debug("Loaded langchain object") @@ -27,7 +28,10 @@ async def process_graph( logger.debug("Generating result and thought") result, intermediate_steps = await get_result_and_steps( - langchain_object, chat_inputs.message, websocket=websocket + langchain_object, + chat_inputs.message, + websocket=websocket, + session_id=session_id, ) logger.debug("Generated result and intermediate_steps") return result, intermediate_steps diff --git a/src/backend/langflow/services/database/factory.py b/src/backend/langflow/services/database/factory.py index fecf24543..3726f520b 100644 --- a/src/backend/langflow/services/database/factory.py +++ b/src/backend/langflow/services/database/factory.py @@ -1,17 +1,17 @@ from typing import TYPE_CHECKING -from langflow.services.database.manager import DatabaseManager +from langflow.services.database.manager import DatabaseService from langflow.services.factory import ServiceFactory if TYPE_CHECKING: - from langflow.services.settings.manager import SettingsManager + from langflow.services.settings.manager import SettingsService -class DatabaseManagerFactory(ServiceFactory): +class 
DatabaseServiceFactory(ServiceFactory): def __init__(self): - super().__init__(DatabaseManager) + super().__init__(DatabaseService) - def create(self, settings_service: "SettingsManager"): - # Here you would have logic to create and configure a DatabaseManager + def create(self, settings_service: "SettingsService"): + # Here you would have logic to create and configure a DatabaseService if not settings_service.settings.DATABASE_URL: raise ValueError("No database URL provided") - return DatabaseManager(settings_service.settings.DATABASE_URL) + return DatabaseService(settings_service.settings.DATABASE_URL) diff --git a/src/backend/langflow/services/database/manager.py b/src/backend/langflow/services/database/manager.py index bf90703e2..3a388f694 100644 --- a/src/backend/langflow/services/database/manager.py +++ b/src/backend/langflow/services/database/manager.py @@ -1,9 +1,13 @@ from pathlib import Path from typing import TYPE_CHECKING from langflow.services.base import Service -from langflow.services.utils import get_settings_manager +from langflow.services.database.models.user.crud import get_user_by_username +from langflow.services.database.utils import Result, TableResults +from langflow.services.getters import get_settings_service +from sqlalchemy import inspect +import sqlalchemy as sa from sqlmodel import SQLModel, Session, create_engine -from langflow.utils.logger import logger +from loguru import logger from alembic.config import Config from alembic import command from langflow.services.database import models # noqa @@ -12,8 +16,8 @@ if TYPE_CHECKING: from sqlalchemy.engine import Engine -class DatabaseManager(Service): - name = "database_manager" +class DatabaseService(Service): + name = "database_service" def __init__(self, database_url: str): self.database_url = database_url @@ -26,10 +30,10 @@ class DatabaseManager(Service): def _create_engine(self) -> "Engine": """Create the engine for the database.""" - settings_manager = get_settings_manager() + 
settings_service = get_settings_service() if ( - settings_manager.settings.DATABASE_URL - and settings_manager.settings.DATABASE_URL.startswith("sqlite") + settings_service.settings.DATABASE_URL + and settings_service.settings.DATABASE_URL.startswith("sqlite") ): connect_args = {"check_same_thread": False} else: @@ -54,15 +58,80 @@ class DatabaseManager(Service): with Session(self.engine) as session: yield session + def check_schema_health(self) -> bool: + inspector = inspect(self.engine) + + model_mapping = { + "flow": models.Flow, + "user": models.User, + "apikey": models.ApiKey, + # Add other SQLModel classes here + } + + # To account for tables that existed in older versions + legacy_tables = ["flowstyle"] + + for table, model in model_mapping.items(): + expected_columns = list(model.__fields__.keys()) + + try: + available_columns = [ + col["name"] for col in inspector.get_columns(table) + ] + except sa.exc.NoSuchTableError: + logger.error(f"Missing table: {table}") + return False + + for column in expected_columns: + if column not in available_columns: + logger.error(f"Missing column: {column} in table {table}") + return False + + for table in legacy_tables: + if table in inspector.get_table_names(): + logger.warning(f"Legacy table exists: {table}") + + return True + def run_migrations(self): - logger.info( - f"Running DB migrations in {self.script_location} on {self.database_url}" - ) + logger.info(f"Running DB migrations in {self.script_location}") alembic_cfg = Config() alembic_cfg.set_main_option("script_location", str(self.script_location)) alembic_cfg.set_main_option("sqlalchemy.url", self.database_url) command.upgrade(alembic_cfg, "head") + def run_migrations_test(self): + # This method is used for testing purposes only + # We will check that all models are in the database + # and that the database is up to date with all columns + sql_models = [models.Flow, models.User, models.ApiKey] + return [ + TableResults(sql_model.__tablename__, 
self.check_table(sql_model)) + for sql_model in sql_models + ] + + def check_table(self, model): + results = [] + inspector = inspect(self.engine) + table_name = model.__tablename__ + expected_columns = list(model.__fields__.keys()) + try: + available_columns = [ + col["name"] for col in inspector.get_columns(table_name) + ] + results.append(Result(name=table_name, type="table", success=True)) + except sa.exc.NoSuchTableError: + logger.error(f"Missing table: {table_name}") + results.append(Result(name=table_name, type="table", success=False)) + + for column in expected_columns: + if column not in available_columns: + logger.error(f"Missing column: {column} in table {table_name}") + results.append(Result(name=column, type="column", success=False)) + else: + results.append(Result(name=column, type="column", success=True)) + return results + def create_db_and_tables(self): logger.debug("Creating database and tables") try: @@ -76,9 +145,34 @@ class DatabaseManager(Service): from sqlalchemy import inspect inspector = inspect(self.engine) - if "flow" not in inspector.get_table_names(): - logger.error("Something went wrong creating the database and tables.") - logger.error("Please check your database settings.") - raise RuntimeError("Something went wrong creating the database and tables.") - else: - logger.debug("Database and tables created successfully") + current_tables = ["flow", "user", "apikey"] + table_names = inspector.get_table_names() + for table in current_tables: + if table not in table_names: + logger.error("Something went wrong creating the database and tables.") + logger.error("Please check your database settings.") + raise RuntimeError( + "Something went wrong creating the database and tables." 
+ ) + + logger.debug("Database and tables created successfully") + + def teardown(self): + logger.debug("Tearing down database") + try: + settings_service = get_settings_service() + # remove the default superuser if auto_login is enabled + # using the SUPERUSER to get the user + if settings_service.auth_settings.AUTO_LOGIN: + logger.debug("Removing default superuser") + username = settings_service.auth_settings.SUPERUSER + with Session(self.engine) as session: + user = get_user_by_username(session, username) + session.delete(user) + session.commit() + logger.debug("Default superuser removed") + + except Exception as exc: + logger.error(f"Error tearing down database: {exc}") + + self.engine.dispose() diff --git a/src/backend/langflow/services/database/models/__init__.py b/src/backend/langflow/services/database/models/__init__.py index da47bc5fe..3cc4231a3 100644 --- a/src/backend/langflow/services/database/models/__init__.py +++ b/src/backend/langflow/services/database/models/__init__.py @@ -1,4 +1,5 @@ from .flow import Flow +from .user import User +from .api_key import ApiKey - -__all__ = ["Flow"] +__all__ = ["Flow", "User", "ApiKey"] diff --git a/src/backend/langflow/services/database/models/api_key/__init__.py b/src/backend/langflow/services/database/models/api_key/__init__.py new file mode 100644 index 000000000..fbb8265b9 --- /dev/null +++ b/src/backend/langflow/services/database/models/api_key/__init__.py @@ -0,0 +1,3 @@ +from .api_key import ApiKey, ApiKeyCreate, UnmaskedApiKeyRead, ApiKeyRead + +__all__ = ["ApiKey", "ApiKeyCreate", "UnmaskedApiKeyRead", "ApiKeyRead"] diff --git a/src/backend/langflow/services/database/models/api_key/api_key.py b/src/backend/langflow/services/database/models/api_key/api_key.py new file mode 100644 index 000000000..35aa6c7a9 --- /dev/null +++ b/src/backend/langflow/services/database/models/api_key/api_key.py @@ -0,0 +1,51 @@ +from pydantic import validator +from sqlmodel import Field, Relationship +from uuid import UUID, 
uuid4 +from typing import Optional, TYPE_CHECKING +from datetime import datetime +from langflow.services.database.models.base import SQLModelSerializable + +if TYPE_CHECKING: + from langflow.services.database.models.user import User + + +class ApiKeyBase(SQLModelSerializable): + name: Optional[str] = Field(index=True) + created_at: datetime = Field(default_factory=datetime.utcnow) + last_used_at: Optional[datetime] = Field(default=None) + total_uses: int = Field(default=0) + is_active: bool = Field(default=True) + + +class ApiKey(ApiKeyBase, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + + api_key: str = Field(index=True, unique=True) + # User relationship + # Delete API keys when user is deleted + user_id: UUID = Field(index=True, foreign_key="user.id") + user: "User" = Relationship( + back_populates="api_keys", + ) + + +class ApiKeyCreate(ApiKeyBase): + api_key: Optional[str] = None + user_id: Optional[UUID] = None + + +class UnmaskedApiKeyRead(ApiKeyBase): + id: UUID + api_key: str = Field() + user_id: UUID = Field() + + +class ApiKeyRead(ApiKeyBase): + id: UUID + api_key: str = Field() + user_id: UUID = Field() + + @validator("api_key", always=True) + def mask_api_key(cls, v): + # This validator will always run, and will mask the API key + return f"{v[:8]}{'*' * (len(v) - 8)}" diff --git a/src/backend/langflow/services/database/models/api_key/crud.py b/src/backend/langflow/services/database/models/api_key/crud.py new file mode 100644 index 000000000..c1c6f786e --- /dev/null +++ b/src/backend/langflow/services/database/models/api_key/crud.py @@ -0,0 +1,77 @@ +import datetime +import secrets +import threading +from uuid import UUID +from typing import List, Optional +from sqlmodel import Session, select +from langflow.services.database.models.api_key import ( + ApiKey, + ApiKeyCreate, + UnmaskedApiKeyRead, + ApiKeyRead, +) + + +def get_api_keys(session: Session, user_id: UUID) -> List[ApiKeyRead]: + query = 
select(ApiKey).where(ApiKey.user_id == user_id) + api_keys = session.exec(query).all() + return [ApiKeyRead.from_orm(api_key) for api_key in api_keys] + + +def create_api_key( + session: Session, api_key_create: ApiKeyCreate, user_id: UUID +) -> UnmaskedApiKeyRead: + # Generate a random API key with 32 bytes of randomness + generated_api_key = f"lf-{secrets.token_urlsafe(32)}" + + api_key = ApiKey( + api_key=generated_api_key, + name=api_key_create.name, + user_id=user_id, + ) + + session.add(api_key) + session.commit() + session.refresh(api_key) + unmasked = UnmaskedApiKeyRead.from_orm(api_key) + unmasked.api_key = generated_api_key + return unmasked + + +def delete_api_key(session: Session, api_key_id: UUID) -> None: + api_key = session.get(ApiKey, api_key_id) + if api_key is None: + raise ValueError("API Key not found") + session.delete(api_key) + session.commit() + + +def check_key(session: Session, api_key: str) -> Optional[ApiKey]: + """Check if the API key is valid.""" + query = select(ApiKey).where(ApiKey.api_key == api_key) + api_key_object: Optional[ApiKey] = session.exec(query).first() + if api_key_object is not None: + threading.Thread( + target=update_total_uses, + args=( + session, + api_key_object, + ), + ).start() + return api_key_object + + +def update_total_uses(session, api_key: ApiKey): + """Update the total uses and last used at.""" + # This is running in a separate thread to avoid slowing down the request + # but session is not thread safe so we need to create a new session + + with Session(session.get_bind()) as new_session: + new_api_key = new_session.get(ApiKey, api_key.id) + if new_api_key is None: + raise ValueError("API Key not found") + new_api_key.total_uses += 1 + new_api_key.last_used_at = datetime.datetime.now(datetime.timezone.utc) + new_session.add(new_api_key) + new_session.commit() + return new_api_key diff --git a/src/backend/langflow/services/database/models/base.py b/src/backend/langflow/services/database/models/base.py index 
f66392ab1..1dc40c383 100644 --- a/src/backend/langflow/services/database/models/base.py +++ b/src/backend/langflow/services/database/models/base.py @@ -3,9 +3,20 @@ import orjson from pydantic import ConfigDict -def orjson_dumps(v, *, default): - # orjson.dumps returns bytes, to match standard json.dumps we need to decode - return orjson.dumps(v, default=default).decode() +def orjson_dumps(v, *, default=None, sort_keys=False, indent_2=True): + option = orjson.OPT_SORT_KEYS if sort_keys else None + if indent_2: + # orjson.dumps returns bytes, to match standard json.dumps we need to decode + # option + # To modify how data is serialized, specify option. Each option is an integer constant in orjson. + # To specify multiple options, mask them together, e.g., option=orjson.OPT_STRICT_INTEGER | orjson.OPT_NAIVE_UTC + if option is None: + option = orjson.OPT_INDENT_2 + else: + option |= orjson.OPT_INDENT_2 + if default is None: + return orjson.dumps(v, option=option).decode() + return orjson.dumps(v, default=default, option=option).decode() class SQLModelSerializable(SQLModel): diff --git a/src/backend/langflow/services/database/models/component/__init__.py b/src/backend/langflow/services/database/models/component/__init__.py new file mode 100644 index 000000000..c787c3e04 --- /dev/null +++ b/src/backend/langflow/services/database/models/component/__init__.py @@ -0,0 +1,3 @@ +from .component import Component, ComponentModel + +__all__ = ["Component", "ComponentModel"] diff --git a/src/backend/langflow/services/database/models/component.py b/src/backend/langflow/services/database/models/component/component.py similarity index 100% rename from src/backend/langflow/services/database/models/component.py rename to src/backend/langflow/services/database/models/component/component.py diff --git a/src/backend/langflow/services/database/models/flow/__init__.py b/src/backend/langflow/services/database/models/flow/__init__.py new file mode 100644 index 000000000..7c7cc0172 --- 
/dev/null +++ b/src/backend/langflow/services/database/models/flow/__init__.py @@ -0,0 +1,3 @@ +from .flow import Flow, FlowCreate, FlowRead, FlowUpdate + +__all__ = ["Flow", "FlowCreate", "FlowRead", "FlowUpdate"] diff --git a/src/backend/langflow/services/database/models/flow.py b/src/backend/langflow/services/database/models/flow/flow.py similarity index 63% rename from src/backend/langflow/services/database/models/flow.py rename to src/backend/langflow/services/database/models/flow/flow.py index 2c379ad1d..73838fc9b 100644 --- a/src/backend/langflow/services/database/models/flow.py +++ b/src/backend/langflow/services/database/models/flow/flow.py @@ -1,10 +1,20 @@ # Path: src/backend/langflow/database/models/flow.py from langflow.services.database.models.base import SQLModelSerializable +<<<<<<< HEAD:src/backend/langflow/services/database/models/flow.py from sqlmodel import Field, JSON, Column from uuid import UUID, uuid4 from typing import Dict, Optional from pydantic import field_validator +======= +from pydantic import validator +from sqlmodel import Field, JSON, Column, Relationship +from uuid import UUID, uuid4 +from typing import Dict, Optional, TYPE_CHECKING + +if TYPE_CHECKING: + from langflow.services.database.models.user import User +>>>>>>> origin/dev:src/backend/langflow/services/database/models/flow/flow.py class FlowBase(SQLModelSerializable): @@ -12,10 +22,15 @@ class FlowBase(SQLModelSerializable): description: Optional[str] = Field(index=True, default="") data: Optional[Dict] = Field(default=None) +<<<<<<< HEAD:src/backend/langflow/services/database/models/flow.py @field_validator("data") @classmethod def validate_json(cls, v): # dict_keys(['description', 'name', 'id', 'data']) +======= + @validator("data") + def validate_json(v): +>>>>>>> origin/dev:src/backend/langflow/services/database/models/flow/flow.py if not v: return v if not isinstance(v, dict): @@ -33,14 +48,17 @@ class FlowBase(SQLModelSerializable): class Flow(FlowBase, table=True): 
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) data: Optional[Dict] = Field(default=None, sa_column=Column(JSON)) + user_id: UUID = Field(index=True, foreign_key="user.id") + user: "User" = Relationship(back_populates="flows") class FlowCreate(FlowBase): - pass + user_id: Optional[UUID] = None class FlowRead(FlowBase): id: UUID + user_id: UUID = Field() class FlowUpdate(SQLModelSerializable): diff --git a/src/backend/langflow/services/database/models/user/__init__.py b/src/backend/langflow/services/database/models/user/__init__.py new file mode 100644 index 000000000..da9170eb7 --- /dev/null +++ b/src/backend/langflow/services/database/models/user/__init__.py @@ -0,0 +1,8 @@ +from .user import User, UserCreate, UserRead, UserUpdate + +__all__ = [ + "User", + "UserCreate", + "UserRead", + "UserUpdate", +] diff --git a/src/backend/langflow/services/database/models/user/crud.py b/src/backend/langflow/services/database/models/user/crud.py new file mode 100644 index 000000000..36f03e684 --- /dev/null +++ b/src/backend/langflow/services/database/models/user/crud.py @@ -0,0 +1,59 @@ +from datetime import datetime, timezone +from typing import Union +from uuid import UUID +from fastapi import Depends, HTTPException, status +from langflow.services.database.models.user.user import User, UserUpdate +from langflow.services.getters import get_session +from sqlalchemy.exc import IntegrityError +from sqlmodel import Session +from typing import Optional + +from sqlalchemy.orm.attributes import flag_modified + + +def get_user_by_username(db: Session, username: str) -> Union[User, None]: + return db.query(User).filter(User.username == username).first() + + +def get_user_by_id(db: Session, id: UUID) -> Union[User, None]: + return db.query(User).filter(User.id == id).first() + + +def update_user( + user_db: Optional[User], user: UserUpdate, db: Session = Depends(get_session) +) -> User: + if not user_db: + raise HTTPException(status_code=404, detail="User not 
found") + + # user_db_by_username = get_user_by_username(db, user.username) # type: ignore + # if user_db_by_username and user_db_by_username.id != user_id: + # raise HTTPException(status_code=409, detail="Username already exists") + + user_data = user.dict(exclude_unset=True) + changed = False + for attr, value in user_data.items(): + if hasattr(user_db, attr) and value is not None: + setattr(user_db, attr, value) + changed = True + + if not changed: + raise HTTPException( + status_code=status.HTTP_304_NOT_MODIFIED, detail="Nothing to update" + ) + + user_db.updated_at = datetime.now(timezone.utc) + flag_modified(user_db, "updated_at") + + try: + db.commit() + except IntegrityError as e: + db.rollback() + raise HTTPException(status_code=400, detail=str(e)) from e + + return user_db + + +def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)): + user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) # type: ignore + user = get_user_by_id(db, user_id) + return update_user(user, user_data, db) diff --git a/src/backend/langflow/services/database/models/user/user.py b/src/backend/langflow/services/database/models/user/user.py new file mode 100644 index 000000000..5f2f3e1c4 --- /dev/null +++ b/src/backend/langflow/services/database/models/user/user.py @@ -0,0 +1,53 @@ +from langflow.services.database.models.base import SQLModel, SQLModelSerializable +from sqlmodel import Field, Relationship + + +from datetime import datetime +from typing import Optional, TYPE_CHECKING +from uuid import UUID, uuid4 + +if TYPE_CHECKING: + from langflow.services.database.models.api_key import ApiKey + from langflow.services.database.models.flow import Flow + + +class User(SQLModelSerializable, table=True): + id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) + username: str = Field(index=True, unique=True) + password: str = Field() + profile_image: Optional[str] = Field(default=None) + is_active: bool = Field(default=False) + 
is_superuser: bool = Field(default=False) + create_at: datetime = Field(default_factory=datetime.utcnow) + updated_at: datetime = Field(default_factory=datetime.utcnow) + last_login_at: Optional[datetime] = Field() + api_keys: list["ApiKey"] = Relationship( + back_populates="user", + sa_relationship_kwargs={"cascade": "delete"}, + ) + flows: list["Flow"] = Relationship(back_populates="user") + + +class UserCreate(SQLModel): + username: str = Field() + password: str = Field() + + +class UserRead(SQLModel): + id: UUID = Field(default_factory=uuid4) + username: str = Field() + profile_image: Optional[str] = Field() + is_active: bool = Field() + is_superuser: bool = Field() + create_at: datetime = Field() + updated_at: datetime = Field() + last_login_at: Optional[datetime] = Field() + + +class UserUpdate(SQLModel): + username: Optional[str] = Field() + profile_image: Optional[str] = Field() + password: Optional[str] = Field() + is_active: Optional[bool] = Field() + is_superuser: Optional[bool] = Field() + last_login_at: Optional[datetime] = Field() diff --git a/src/backend/langflow/services/database/utils.py b/src/backend/langflow/services/database/utils.py index 94bcd6651..ce4990fa1 100644 --- a/src/backend/langflow/services/database/utils.py +++ b/src/backend/langflow/services/database/utils.py @@ -1,20 +1,26 @@ +from dataclasses import dataclass from typing import TYPE_CHECKING -from langflow.utils.logger import logger +from loguru import logger from contextlib import contextmanager from alembic.util.exc import CommandError from sqlmodel import Session if TYPE_CHECKING: - from langflow.services.database.manager import DatabaseManager + from langflow.services.database.manager import DatabaseService def initialize_database(): logger.debug("Initializing database") from langflow.services import service_manager, ServiceType - database_manager = service_manager.get(ServiceType.DATABASE_MANAGER) + database_service = service_manager.get(ServiceType.DATABASE_SERVICE) try: - 
database_manager.run_migrations() + database_service.check_schema_health() + except Exception as exc: + logger.error(f"Error checking schema health: {exc}") + raise RuntimeError("Error checking schema health") from exc + try: + database_service.run_migrations() except CommandError as exc: if "Can't locate revision identified by" not in str(exc): raise exc @@ -24,20 +30,23 @@ def initialize_database(): logger.warning( "Wrong revision in DB, deleting alembic_version table and running migrations again" ) - with session_getter(database_manager) as session: + with session_getter(database_service) as session: session.execute("DROP TABLE alembic_version") - database_manager.run_migrations() + database_service.run_migrations() except Exception as exc: - logger.error(f"Error running migrations: {exc}") - raise RuntimeError("Error running migrations") from exc - database_manager.create_db_and_tables() + # if the exception involves tables already existing + # we can ignore it + if "already exists" not in str(exc): + logger.error(f"Error running migrations: {exc}") + raise RuntimeError("Error running migrations") from exc + database_service.create_db_and_tables() logger.debug("Database initialized") @contextmanager -def session_getter(db_manager: "DatabaseManager"): +def session_getter(db_service: "DatabaseService"): try: - session = Session(db_manager.engine) + session = Session(db_service.engine) yield session except Exception as e: print("Session rollback because of exception:", e) @@ -45,3 +54,16 @@ def session_getter(db_manager: "DatabaseManager"): raise finally: session.close() + + +@dataclass +class Result: + name: str + type: str + success: bool + + +@dataclass +class TableResults: + table_name: str + results: list[Result] diff --git a/src/backend/langflow/services/getters.py b/src/backend/langflow/services/getters.py new file mode 100644 index 000000000..e88b998b5 --- /dev/null +++ b/src/backend/langflow/services/getters.py @@ -0,0 +1,48 @@ +from langflow.services 
import ServiceType, service_manager +from typing import TYPE_CHECKING, Generator + + +if TYPE_CHECKING: + from langflow.services.database.manager import DatabaseService + from langflow.services.settings.manager import SettingsService + from langflow.services.cache.manager import BaseCacheService + from langflow.services.session.manager import SessionService + from langflow.services.task.manager import TaskService + from langflow.services.chat.manager import ChatService + from sqlmodel import Session + + +def get_settings_service() -> "SettingsService": + try: + return service_manager.get(ServiceType.SETTINGS_SERVICE) + except ValueError: + # initialize settings service + from langflow.services.manager import initialize_settings_service + + initialize_settings_service() + return service_manager.get(ServiceType.SETTINGS_SERVICE) + + +def get_db_service() -> "DatabaseService": + return service_manager.get(ServiceType.DATABASE_SERVICE) + + +def get_session() -> Generator["Session", None, None]: + db_service = service_manager.get(ServiceType.DATABASE_SERVICE) + yield from db_service.get_session() + + +def get_cache_service() -> "BaseCacheService": + return service_manager.get(ServiceType.CACHE_SERVICE) + + +def get_session_service() -> "SessionService": + return service_manager.get(ServiceType.SESSION_SERVICE) + + +def get_task_service() -> "TaskService": + return service_manager.get(ServiceType.TASK_SERVICE) + + +def get_chat_service() -> "ChatService": + return service_manager.get(ServiceType.CHAT_SERVICE) diff --git a/src/backend/langflow/services/manager.py b/src/backend/langflow/services/manager.py index f05102d0e..e74146a20 100644 --- a/src/backend/langflow/services/manager.py +++ b/src/backend/langflow/services/manager.py @@ -1,8 +1,10 @@ from langflow.services.schema import ServiceType -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Dict, List, Optional +from loguru import logger if TYPE_CHECKING: from langflow.services.factory import 
ServiceFactory + from langflow.services.base import Service class ServiceManager: @@ -11,15 +13,23 @@ class ServiceManager: """ def __init__(self): - self.services = {} + self.services: Dict[str, "Service"] = {} self.factories = {} + self.dependencies = {} - def register_factory(self, service_factory: "ServiceFactory"): + def register_factory( + self, + service_factory: "ServiceFactory", + dependencies: Optional[List[ServiceType]] = None, + ): """ - Registers a new factory. + Registers a new factory with dependencies. """ - if service_factory.service_class.name not in self.factories: - self.factories[service_factory.service_class.name] = service_factory + if dependencies is None: + dependencies = [] + service_name = service_factory.service_class.name + self.factories[service_name] = service_factory + self.dependencies[service_name] = dependencies def get(self, service_name: ServiceType): """ @@ -32,17 +42,27 @@ class ServiceManager: def _create_service(self, service_name: ServiceType): """ - Create a new service given its name. + Create a new service given its name, handling dependencies. 
""" + logger.debug(f"Create service {service_name}") self._validate_service_creation(service_name) - if service_name == ServiceType.SETTINGS_MANAGER: - self.services[service_name] = self.factories[service_name].create() - else: - settings_service = self.get(ServiceType.SETTINGS_MANAGER) - self.services[service_name] = self.factories[service_name].create( - settings_service - ) + # Create dependencies first + for dependency in self.dependencies.get(service_name, []): + if dependency not in self.services: + self._create_service(dependency) + + # Collect the dependent services + dependent_services = { + dep.value: self.services[dep] + for dep in self.dependencies.get(service_name, []) + } + + # Create the actual service + self.services[service_name] = self.factories[service_name].create( + **dependent_services + ) + self.services[service_name].set_ready() def _validate_service_creation(self, service_name: ServiceType): """ @@ -53,22 +73,28 @@ class ServiceManager: f"No factory registered for the service class '{service_name.name}'" ) - if ( - ServiceType.SETTINGS_MANAGER not in self.factories - and service_name != ServiceType.SETTINGS_MANAGER - ): - raise ValueError( - f"Cannot create service '{service_name.name}' before the settings service" - ) - def update(self, service_name: ServiceType): """ Update a service by its name. """ if service_name in self.services: + logger.debug(f"Update service {service_name}") self.services.pop(service_name, None) self.get(service_name) + def teardown(self): + """ + Teardown all the services. 
+ """ + for service in self.services.values(): + if service is None: + continue + logger.debug(f"Teardown service {service.name}") + service.teardown() + self.services = {} + self.factories = {} + self.dependencies = {} + service_manager = ServiceManager() @@ -81,17 +107,97 @@ def initialize_services(): from langflow.services.cache import factory as cache_factory from langflow.services.chat import factory as chat_factory from langflow.services.settings import factory as settings_factory + from langflow.services.session import factory as session_service_factory + from langflow.services.auth import factory as auth_factory + from langflow.services.task import factory as task_factory - service_manager.register_factory(settings_factory.SettingsManagerFactory()) - service_manager.register_factory(database_factory.DatabaseManagerFactory()) - service_manager.register_factory(cache_factory.CacheManagerFactory()) - service_manager.register_factory(chat_factory.ChatManagerFactory()) + service_manager.register_factory(settings_factory.SettingsServiceFactory()) + service_manager.register_factory( + database_factory.DatabaseServiceFactory(), + dependencies=[ServiceType.SETTINGS_SERVICE], + ) + service_manager.register_factory( + cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] + ) + + service_manager.register_factory( + auth_factory.AuthServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] + ) + + service_manager.register_factory(chat_factory.ChatServiceFactory()) + service_manager.register_factory( + session_service_factory.SessionServiceFactory(), + dependencies=[ServiceType.CACHE_SERVICE], + ) + service_manager.register_factory( + task_factory.TaskServiceFactory(), + ) + + # Test cache connection + service_manager.get(ServiceType.CACHE_SERVICE) + # Test database connection + service_manager.get(ServiceType.DATABASE_SERVICE) + + # Test cache connection + service_manager.get(ServiceType.CACHE_SERVICE) + # Test database connection + 
service_manager.get(ServiceType.DATABASE_SERVICE) -def initialize_settings_manager(): +def reinitialize_services(): + """ + Reinitialize all the services needed. + """ + + service_manager.update(ServiceType.SETTINGS_SERVICE) + service_manager.update(ServiceType.DATABASE_SERVICE) + service_manager.update(ServiceType.CACHE_SERVICE) + service_manager.update(ServiceType.CHAT_SERVICE) + service_manager.update(ServiceType.SESSION_SERVICE) + service_manager.update(ServiceType.AUTH_SERVICE) + service_manager.update(ServiceType.TASK_SERVICE) + + # Test cache connection + service_manager.get(ServiceType.CACHE_SERVICE) + # Test database connection + service_manager.get(ServiceType.DATABASE_SERVICE) + + # Test cache connection + service_manager.get(ServiceType.CACHE_SERVICE) + # Test database connection + service_manager.get(ServiceType.DATABASE_SERVICE) + + +def initialize_settings_service(): """ Initialize the settings manager. """ from langflow.services.settings import factory as settings_factory - service_manager.register_factory(settings_factory.SettingsManagerFactory()) + service_manager.register_factory(settings_factory.SettingsServiceFactory()) + + +def initialize_session_service(): + """ + Initialize the session manager. + """ + from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.cache import factory as cache_factory + + initialize_settings_service() + + service_manager.register_factory( + cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] + ) + + service_manager.register_factory( + session_service_factory.SessionServiceFactory(), + dependencies=[ServiceType.CACHE_SERVICE], + ) + + +def teardown_services(): + """ + Teardown all the services. 
+ """ + service_manager.teardown() diff --git a/src/backend/langflow/services/plugins/__init__.py b/src/backend/langflow/services/plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/plugins/langfuse.py b/src/backend/langflow/services/plugins/langfuse.py new file mode 100644 index 000000000..29d59808b --- /dev/null +++ b/src/backend/langflow/services/plugins/langfuse.py @@ -0,0 +1,50 @@ +from langflow.services.getters import get_settings_service +from langflow.utils.logger import logger + +### Temporary implementation +# This will be replaced by a plugin system once merged into 0.5.0 + + +class LangfuseInstance: + _instance = None + + @classmethod + def get(cls): + logger.debug("Getting Langfuse instance") + if cls._instance is None: + cls.create() + return cls._instance + + @classmethod + def create(cls): + logger.debug("Creating Langfuse instance") + from langfuse import Langfuse # type: ignore + + settings_manager = get_settings_service() + + if ( + settings_manager.settings.LANGFUSE_PUBLIC_KEY + and settings_manager.settings.LANGFUSE_SECRET_KEY + ): + logger.debug("Langfuse credentials found") + cls._instance = Langfuse( + public_key=settings_manager.settings.LANGFUSE_PUBLIC_KEY, + secret_key=settings_manager.settings.LANGFUSE_SECRET_KEY, + host=settings_manager.settings.LANGFUSE_HOST, + ) + else: + logger.debug("No Langfuse credentials found") + cls._instance = None + + @classmethod + def update(cls): + logger.debug("Updating Langfuse instance") + cls._instance = None + cls.create() + + @classmethod + def teardown(cls): + logger.debug("Tearing down Langfuse instance") + if cls._instance is not None: + cls._instance.flush() + cls._instance = None diff --git a/src/backend/langflow/services/schema.py b/src/backend/langflow/services/schema.py index 695763afc..8b3b41fcb 100644 --- a/src/backend/langflow/services/schema.py +++ b/src/backend/langflow/services/schema.py @@ -7,7 +7,10 @@ class ServiceType(str, 
Enum): registered with the service manager. """ - CACHE_MANAGER = "cache_manager" - SETTINGS_MANAGER = "settings_manager" - DATABASE_MANAGER = "database_manager" - CHAT_MANAGER = "chat_manager" + AUTH_SERVICE = "auth_service" + CACHE_SERVICE = "cache_service" + SETTINGS_SERVICE = "settings_service" + DATABASE_SERVICE = "database_service" + CHAT_SERVICE = "chat_service" + SESSION_SERVICE = "session_service" + TASK_SERVICE = "task_service" diff --git a/src/backend/langflow/services/session/__init__.py b/src/backend/langflow/services/session/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/session/factory.py b/src/backend/langflow/services/session/factory.py new file mode 100644 index 000000000..9abe025a8 --- /dev/null +++ b/src/backend/langflow/services/session/factory.py @@ -0,0 +1,14 @@ +from typing import TYPE_CHECKING +from langflow.services.session.manager import SessionService +from langflow.services.factory import ServiceFactory + +if TYPE_CHECKING: + from langflow.services.cache.manager import BaseCacheService + + +class SessionServiceFactory(ServiceFactory): + def __init__(self): + super().__init__(SessionService) + + def create(self, cache_service: "BaseCacheService"): + return SessionService(cache_service) diff --git a/src/backend/langflow/services/session/manager.py b/src/backend/langflow/services/session/manager.py new file mode 100644 index 000000000..6bdebf6b3 --- /dev/null +++ b/src/backend/langflow/services/session/manager.py @@ -0,0 +1,47 @@ +from typing import TYPE_CHECKING +from langflow.interface.run import build_sorted_vertices +from langflow.services.base import Service +from langflow.services.cache.utils import compute_dict_hash +from langflow.services.session.utils import session_id_generator + +if TYPE_CHECKING: + from langflow.services.cache.base import BaseCacheService + + +class SessionService(Service): + name = "session_service" + + def __init__(self, cache_service): + 
self.cache_service: "BaseCacheService" = cache_service + + def load_session(self, key, data_graph): + # Check if the data is cached + if key in self.cache_service: + return self.cache_service.get(key) + + if key is None: + key = self.generate_key(session_id=None, data_graph=data_graph) + + # If not cached, build the graph and cache it + graph, artifacts = build_sorted_vertices(data_graph) + + self.cache_service.set(key, (graph, artifacts)) + + return graph, artifacts + + def build_key(self, session_id, data_graph): + json_hash = compute_dict_hash(data_graph) + return f"{session_id}{':' if session_id else ''}{json_hash}" + + def generate_key(self, session_id, data_graph): + # Hash the JSON and combine it with the session_id to create a unique key + if session_id is None: + # generate a 5 char session_id to concatenate with the json_hash + session_id = session_id_generator() + return self.build_key(session_id, data_graph=data_graph) + + def update_session(self, session_id, value): + self.cache_service.set(session_id, value) + + def clear_session(self, session_id): + self.cache_service.delete(session_id) diff --git a/src/backend/langflow/services/session/utils.py b/src/backend/langflow/services/session/utils.py new file mode 100644 index 000000000..374d85540 --- /dev/null +++ b/src/backend/langflow/services/session/utils.py @@ -0,0 +1,8 @@ +import random +import string + + +def session_id_generator(size=6): + return "".join( + random.SystemRandom().choices(string.ascii_uppercase + string.digits, k=size) + ) diff --git a/src/backend/langflow/services/settings/auth.py b/src/backend/langflow/services/settings/auth.py new file mode 100644 index 000000000..cff671b84 --- /dev/null +++ b/src/backend/langflow/services/settings/auth.py @@ -0,0 +1,99 @@ +from pathlib import Path +from typing import Optional +import secrets +from langflow.services.settings.constants import ( + DEFAULT_SUPERUSER, + DEFAULT_SUPERUSER_PASSWORD, +) +from langflow.services.settings.utils import 
read_secret_from_file, write_secret_to_file + +from pydantic import BaseSettings, Field, validator +from passlib.context import CryptContext +from loguru import logger + + +class AuthSettings(BaseSettings): + # Login settings + CONFIG_DIR: str + SECRET_KEY: str = Field( + default="", + description="Secret key for JWT. If not provided, a random one will be generated.", + env="LANGFLOW_SECRET_KEY", + allow_mutation=False, + ) + ALGORITHM: str = "HS256" + ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 + REFRESH_TOKEN_EXPIRE_MINUTES: int = 60 * 12 + + # API Key to execute /process endpoint + API_KEY_SECRET_KEY: Optional[ + str + ] = "b82818e0ad4ff76615c5721ee21004b07d84cd9b87ba4d9cb42374da134b841a" + API_KEY_ALGORITHM: str = "HS256" + API_V1_STR: str = "/api/v1" + + # If AUTO_LOGIN = True + # > The application does not request login and logs in automatically as a super user. + AUTO_LOGIN: bool = False + NEW_USER_IS_ACTIVE: bool = False + SUPERUSER: str = DEFAULT_SUPERUSER + SUPERUSER_PASSWORD: str = DEFAULT_SUPERUSER_PASSWORD + + pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + + class Config: + validate_assignment = True + extra = "ignore" + env_prefix = "LANGFLOW_" + + def reset_credentials(self): + self.SUPERUSER = DEFAULT_SUPERUSER + self.SUPERUSER_PASSWORD = DEFAULT_SUPERUSER_PASSWORD + + # If autologin is true, then we need to set the credentials to + # the default values + # so we need to validate the superuser and superuser_password + # fields + @validator("SUPERUSER", "SUPERUSER_PASSWORD", pre=True) + def validate_superuser(cls, value, values): + if values.get("AUTO_LOGIN"): + if value != DEFAULT_SUPERUSER: + value = DEFAULT_SUPERUSER + logger.debug("Resetting superuser to default value") + if values.get("SUPERUSER_PASSWORD") != DEFAULT_SUPERUSER_PASSWORD: + values["SUPERUSER_PASSWORD"] = DEFAULT_SUPERUSER_PASSWORD + logger.debug("Resetting superuser password to default value") + + return value + + return value + + @validator("SECRET_KEY", pre=True) 
+ def get_secret_key(cls, value, values): + config_dir = values.get("CONFIG_DIR") + + if not config_dir: + logger.debug("No CONFIG_DIR provided, not saving secret key") + return value or secrets.token_urlsafe(32) + + secret_key_path = Path(config_dir) / "secret_key" + + if value: + logger.debug("Secret key provided") + write_secret_to_file(secret_key_path, value) + else: + logger.debug("No secret key provided, generating a random one") + + if secret_key_path.exists(): + value = read_secret_from_file(secret_key_path) + logger.debug("Loaded secret key") + if not value: + value = secrets.token_urlsafe(32) + write_secret_to_file(secret_key_path, value) + logger.debug("Saved secret key") + else: + value = secrets.token_urlsafe(32) + write_secret_to_file(secret_key_path, value) + logger.debug("Saved secret key") + + return value diff --git a/src/backend/langflow/services/settings/base.py b/src/backend/langflow/services/settings/base.py index 5545af45d..686b2f039 100644 --- a/src/backend/langflow/services/settings/base.py +++ b/src/backend/langflow/services/settings/base.py @@ -1,15 +1,20 @@ import contextlib import json +import orjson import os from shutil import copy2 -import secrets from typing import Optional, List from pathlib import Path import yaml +<<<<<<< HEAD from pydantic_settings import SettingsConfigDict, BaseSettings from pydantic import field_validator, validator from langflow.utils.logger import logger +======= +from pydantic import BaseSettings, root_validator, validator +from loguru import logger +>>>>>>> origin/dev # BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") BASE_COMPONENTS_PATH = str(Path(__file__).parent.parent.parent / "components") @@ -38,18 +43,20 @@ class Settings(BaseSettings): DEV: bool = False DATABASE_URL: Optional[str] = None - CACHE: str = "InMemoryCache" + CACHE_TYPE: str = "memory" REMOVE_API_KEYS: bool = False COMPONENTS_PATH: List[str] = [] + LANGCHAIN_CACHE: str = "InMemoryCache" - # Login settings - SECRET_KEY: 
str = secrets.token_hex(32) - ALGORITHM: str = "HS256" - ACCESS_TOKEN_EXPIRE_MINUTES: int = 60 - REFRESH_TOKEN_EXPIRE_MINUTES: int = 70 - # If AUTO_LOGIN = True - # > The application does not request login and logs in automatically as a super user. - AUTO_LOGIN: bool = True + # Redis + REDIS_HOST: str = "localhost" + REDIS_PORT: int = 6379 + REDIS_DB: int = 0 + REDIS_CACHE_EXPIRE: int = 3600 + + LANGFUSE_SECRET_KEY: Optional[str] = None + LANGFUSE_PUBLIC_KEY: Optional[str] = None + LANGFUSE_HOST: Optional[str] = None @validator("CONFIG_DIR", pre=True, allow_reuse=True) def set_langflow_dir(cls, value): @@ -186,7 +193,7 @@ class Settings(BaseSettings): if isinstance(getattr(self, key), list): # value might be a '[something]' string with contextlib.suppress(json.decoder.JSONDecodeError): - value = json.loads(str(value)) + value = orjson.loads(str(value)) if isinstance(value, list): for item in value: if isinstance(item, Path): diff --git a/src/backend/langflow/services/settings/constants.py b/src/backend/langflow/services/settings/constants.py new file mode 100644 index 000000000..6cf7d4823 --- /dev/null +++ b/src/backend/langflow/services/settings/constants.py @@ -0,0 +1,2 @@ +DEFAULT_SUPERUSER = "langflow" +DEFAULT_SUPERUSER_PASSWORD = "langflow" diff --git a/src/backend/langflow/services/settings/factory.py b/src/backend/langflow/services/settings/factory.py index ab22e22b8..9202ae8c3 100644 --- a/src/backend/langflow/services/settings/factory.py +++ b/src/backend/langflow/services/settings/factory.py @@ -1,15 +1,15 @@ from pathlib import Path -from langflow.services.settings.manager import SettingsManager +from langflow.services.settings.manager import SettingsService from langflow.services.factory import ServiceFactory -class SettingsManagerFactory(ServiceFactory): +class SettingsServiceFactory(ServiceFactory): def __init__(self): - super().__init__(SettingsManager) + super().__init__(SettingsService) def create(self): - # Here you would have logic to create and 
configure a SettingsManager + # Here you would have logic to create and configure a SettingsService langflow_dir = Path(__file__).parent.parent.parent - return SettingsManager.load_settings_from_yaml( + return SettingsService.load_settings_from_yaml( str(langflow_dir / "config.yaml") ) diff --git a/src/backend/langflow/services/settings/manager.py b/src/backend/langflow/services/settings/manager.py index 713ff8192..40bf396f0 100644 --- a/src/backend/langflow/services/settings/manager.py +++ b/src/backend/langflow/services/settings/manager.py @@ -1,19 +1,21 @@ from langflow.services.base import Service +from langflow.services.settings.auth import AuthSettings from langflow.services.settings.base import Settings -from langflow.utils.logger import logger +from loguru import logger import os import yaml -class SettingsManager(Service): - name = "settings_manager" +class SettingsService(Service): + name = "settings_service" - def __init__(self, settings: Settings): + def __init__(self, settings: Settings, auth_settings: AuthSettings): super().__init__() self.settings = settings + self.auth_settings = auth_settings @classmethod - def load_settings_from_yaml(cls, file_path: str) -> "SettingsManager": + def load_settings_from_yaml(cls, file_path: str) -> "SettingsService": # Check if a string is a valid path or a file name if "/" not in file_path: # Get current path @@ -33,4 +35,10 @@ class SettingsManager(Service): ) settings = Settings(**settings_dict) - return cls(settings) + if not settings.CONFIG_DIR: + raise ValueError("CONFIG_DIR must be set in settings") + + auth_settings = AuthSettings( + CONFIG_DIR=settings.CONFIG_DIR, + ) + return cls(settings, auth_settings) diff --git a/src/backend/langflow/services/settings/utils.py b/src/backend/langflow/services/settings/utils.py new file mode 100644 index 000000000..fae96ff28 --- /dev/null +++ b/src/backend/langflow/services/settings/utils.py @@ -0,0 +1,47 @@ +import os +from pathlib import Path +import platform + +from 
loguru import logger + + +def set_secure_permissions(file_path): + if platform.system() in ["Linux", "Darwin"]: # Unix/Linux/Mac + os.chmod(file_path, 0o600) + elif platform.system() == "Windows": + import win32api + import win32con + import win32security + + user, domain, _ = win32security.LookupAccountName("", win32api.GetUserName()) + sd = win32security.GetFileSecurity( + file_path, win32security.DACL_SECURITY_INFORMATION + ) + dacl = win32security.ACL() + + # Set the new DACL for the file: read and write access for the owner, no access for everyone else + dacl.AddAccessAllowedAce( + win32security.ACL_REVISION, + win32con.GENERIC_READ | win32con.GENERIC_WRITE, + user, + ) + sd.SetSecurityDescriptorDacl(1, dacl, 0) + win32security.SetFileSecurity( + file_path, win32security.DACL_SECURITY_INFORMATION, sd + ) + else: + print("Unsupported OS") + + +def write_secret_to_file(path: Path, value: str) -> None: + with path.open("wb") as f: + f.write(value.encode("utf-8")) + try: + set_secure_permissions(path) + except Exception: + logger.error("Failed to set secure permissions on secret key") + + +def read_secret_from_file(path: Path) -> str: + with path.open("r") as f: + return f.read() diff --git a/src/backend/langflow/services/task/__init__.py b/src/backend/langflow/services/task/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/task/backends/__init__.py b/src/backend/langflow/services/task/backends/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/backend/langflow/services/task/backends/anyio.py b/src/backend/langflow/services/task/backends/anyio.py new file mode 100644 index 000000000..ca91eca19 --- /dev/null +++ b/src/backend/langflow/services/task/backends/anyio.py @@ -0,0 +1,67 @@ +from typing import Any, Callable, Optional, Tuple +import anyio +from langflow.services.task.backends.base import TaskBackend +from loguru import logger + + +class AnyIOTaskResult: + def __init__(self, 
scope): + self._scope = scope + self._status = "PENDING" + self._result = None + self._exception = None + + @property + def status(self) -> str: + if self._status == "DONE": + return "FAILURE" if self._exception is not None else "SUCCESS" + return self._status + + @property + def result(self) -> Any: + return self._result + + def ready(self) -> bool: + return self._status == "DONE" + + async def run(self, func, *args, **kwargs): + try: + self._result = await func(*args, **kwargs) + except Exception as e: + self._exception = e + finally: + self._status = "DONE" + + +class AnyIOBackend(TaskBackend): + def __init__(self): + self.tasks = {} + + async def launch_task( + self, task_func: Callable[..., Any], *args: Any, **kwargs: Any + ) -> Tuple[Optional[str], Optional[AnyIOTaskResult]]: + """ + Launch a new task in an asynchronous manner. + + Parameters: + task_func: The asynchronous function to run. + *args: Positional arguments to pass to task_func. + **kwargs: Keyword arguments to pass to task_func. + + Returns: + A tuple containing a unique task ID and the task result object. 
+ """ + async with anyio.create_task_group() as tg: + try: + task_result = AnyIOTaskResult(tg) + tg.start_soon(task_result.run, task_func, *args, **kwargs) + task_id = str(id(task_result)) + self.tasks[task_id] = task_result + logger.info(f"Task {task_id} started.") + return task_id, task_result + except Exception as e: + logger.error(f"An error occurred while launching the task: {e}") + return None, None + + def get_task(self, task_id: str) -> Any: + return self.tasks.get(task_id) diff --git a/src/backend/langflow/services/task/backends/base.py b/src/backend/langflow/services/task/backends/base.py new file mode 100644 index 000000000..ccbd9273b --- /dev/null +++ b/src/backend/langflow/services/task/backends/base.py @@ -0,0 +1,12 @@ +from abc import ABC, abstractmethod +from typing import Any, Callable + + +class TaskBackend(ABC): + @abstractmethod + def launch_task(self, task_func: Callable[..., Any], *args: Any, **kwargs: Any): + pass + + @abstractmethod + def get_task(self, task_id: str) -> Any: + pass diff --git a/src/backend/langflow/services/task/backends/celery.py b/src/backend/langflow/services/task/backends/celery.py new file mode 100644 index 000000000..5b0a035ef --- /dev/null +++ b/src/backend/langflow/services/task/backends/celery.py @@ -0,0 +1,23 @@ +from typing import Any, Callable +from celery.result import AsyncResult # type: ignore +from langflow.services.task.backends.base import TaskBackend +from langflow.worker import celery_app + + +class CeleryBackend(TaskBackend): + def __init__(self): + self.celery_app = celery_app + + def launch_task( + self, task_func: Callable[..., Any], *args: Any, **kwargs: Any + ) -> tuple[str, AsyncResult]: + # I need to type the delay method to make it easier + from celery import Task # type: ignore + + if not hasattr(task_func, "delay"): + raise ValueError(f"Task function {task_func} does not have a delay method") + task: Task = task_func.delay(*args, **kwargs) + return task.id, AsyncResult(task.id, 
app=self.celery_app) + + def get_task(self, task_id: str) -> Any: + return AsyncResult(task_id, app=self.celery_app) diff --git a/src/backend/langflow/services/task/factory.py b/src/backend/langflow/services/task/factory.py new file mode 100644 index 000000000..efb6ac24d --- /dev/null +++ b/src/backend/langflow/services/task/factory.py @@ -0,0 +1,11 @@ +from langflow.services.task.manager import TaskService +from langflow.services.factory import ServiceFactory + + +class TaskServiceFactory(ServiceFactory): + def __init__(self): + super().__init__(TaskService) + + def create(self): + # Here you would have logic to create and configure a TaskService + return TaskService() diff --git a/src/backend/langflow/services/task/manager.py b/src/backend/langflow/services/task/manager.py new file mode 100644 index 000000000..422b34faa --- /dev/null +++ b/src/backend/langflow/services/task/manager.py @@ -0,0 +1,71 @@ +from typing import Any, Callable, Coroutine, Union +from langflow.utils.logger import configure +from loguru import logger +from langflow.services.base import Service +from langflow.services.task.backends.anyio import AnyIOBackend +from langflow.services.task.backends.base import TaskBackend +from langflow.services.task.utils import get_celery_worker_status + + +def check_celery_availability(): + try: + from langflow.worker import celery_app + + status = get_celery_worker_status(celery_app) + logger.debug(f"Celery status: {status}") + except Exception as exc: + logger.debug(f"Celery not available: {exc}") + status = {"availability": None} + return status + + +try: + configure() + status = check_celery_availability() + + USE_CELERY = status.get("availability") is not None +except ImportError: + USE_CELERY = False + + +class TaskService(Service): + name = "task_service" + + def __init__(self): + self.backend = self.get_backend() + self.use_celery = USE_CELERY + + def get_backend(self) -> TaskBackend: + if USE_CELERY: + from langflow.services.task.backends.celery 
import CeleryBackend + + logger.debug("Using Celery backend") + return CeleryBackend() + logger.debug("Using AnyIO backend") + return AnyIOBackend() + + # In your TaskService class + async def launch_and_await_task( + self, + task_func: Callable[..., Any], + *args: Any, + **kwargs: Any, + ) -> Any: + if not self.use_celery: + return None, await task_func(*args, **kwargs) + if not hasattr(task_func, "apply"): + raise ValueError(f"Task function {task_func} does not have an apply method") + task = task_func.apply(args=args, kwargs=kwargs) + result = task.get() + return task.id, result + + async def launch_task( + self, task_func: Callable[..., Any], *args: Any, **kwargs: Any + ) -> Any: + logger.debug(f"Launching task {task_func} with args {args} and kwargs {kwargs}") + logger.debug(f"Using backend {self.backend}") + task = self.backend.launch_task(task_func, *args, **kwargs) + return await task if isinstance(task, Coroutine) else task + + def get_task(self, task_id: Union[int, str]) -> Any: + return self.backend.get_task(task_id) diff --git a/src/backend/langflow/services/task/utils.py b/src/backend/langflow/services/task/utils.py new file mode 100644 index 000000000..5dfb03b83 --- /dev/null +++ b/src/backend/langflow/services/task/utils.py @@ -0,0 +1,22 @@ +import contextlib +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + with contextlib.suppress(ImportError): + from celery import Celery # type: ignore + + +def get_celery_worker_status(app: "Celery"): + i = app.control.inspect() + availability = app.control.ping() + stats = i.stats() + registered_tasks = i.registered() + active_tasks = i.active() + scheduled_tasks = i.scheduled() + return { + "availability": availability, + "stats": stats, + "registered_tasks": registered_tasks, + "active_tasks": active_tasks, + "scheduled_tasks": scheduled_tasks, + } diff --git a/src/backend/langflow/services/utils.py b/src/backend/langflow/services/utils.py index 049e82c0f..02d9816f5 100644 --- 
a/src/backend/langflow/services/utils.py +++ b/src/backend/langflow/services/utils.py @@ -1,18 +1,153 @@ -from langflow.services import ServiceType, service_manager -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from langflow.services.settings.manager import SettingsManager +from langflow.services.auth.utils import create_super_user +from langflow.services.database.utils import initialize_database +from langflow.services.manager import service_manager +from langflow.services.schema import ServiceType +from langflow.services.settings.constants import ( + DEFAULT_SUPERUSER, + DEFAULT_SUPERUSER_PASSWORD, +) +from .getters import get_session, get_settings_service +from loguru import logger -def get_settings_manager() -> "SettingsManager": - return service_manager.get(ServiceType.SETTINGS_MANAGER) +def setup_superuser(settings_service, session): + """ + Setup the superuser. + """ + # We will use the SUPERUSER and SUPERUSER_PASSWORD + # vars on settings_manager.auth_settings to create the superuser + # if it does not exist. + if settings_service.auth_settings.AUTO_LOGIN: + logger.debug("AUTO_LOGIN is set to True. Creating default superuser.") + + username = settings_service.auth_settings.SUPERUSER + password = settings_service.auth_settings.SUPERUSER_PASSWORD + if username == DEFAULT_SUPERUSER and password == DEFAULT_SUPERUSER_PASSWORD: + logger.debug("Default superuser credentials detected.") + logger.debug("Creating default superuser.") + else: + logger.debug("Creating superuser.") + + try: + from langflow.services.database.models.user.user import User + + user = session.query(User).filter(User.username == username).first() + if user and user.is_superuser is True: + return + except Exception as exc: + logger.exception(exc) + raise RuntimeError( + "Could not create superuser. Please create a superuser manually." 
+ ) from exc + try: + # create superuser + create_super_user(db=session, username=username, password=password) + except Exception as exc: + logger.exception(exc) + raise RuntimeError( + "Could not create superuser. Please create a superuser manually." + ) from exc + # reset superuser credentials + settings_service.auth_settings.reset_credentials() + logger.debug("Superuser created successfully.") -def get_db_manager(): - return service_manager.get(ServiceType.DATABASE_MANAGER) +def teardown_superuser(settings_service, session): + """ + Teardown the superuser. + """ + # If AUTO_LOGIN is True, we will remove the default superuser + # from the database. + + if settings_service.auth_settings.AUTO_LOGIN: + logger.debug("AUTO_LOGIN is set to True. Removing default superuser.") + username = settings_service.auth_settings.SUPERUSER + from langflow.services.database.models.user.user import User + + user = session.query(User).filter(User.username == username).first() + if user and user.is_superuser: + session.delete(user) + session.commit() + logger.debug("Default superuser removed successfully.") + else: + logger.debug("Default superuser not found.") -def get_session(): - db_manager = service_manager.get(ServiceType.DATABASE_MANAGER) - yield from db_manager.get_session() +def teardown_services(): + """ + Teardown all the services. + """ + try: + teardown_superuser(get_settings_service(), next(get_session())) + service_manager.teardown() + except Exception as exc: + logger.exception(exc) + + +def initialize_settings_service(): + """ + Initialize the settings manager. + """ + from langflow.services.settings import factory as settings_factory + + service_manager.register_factory(settings_factory.SettingsServiceFactory()) + + +def initialize_session_service(): + """ + Initialize the session manager. 
+ """ + from langflow.services.session import factory as session_service_factory # type: ignore + from langflow.services.cache import factory as cache_factory + + initialize_settings_service() + + service_manager.register_factory( + cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] + ) + + service_manager.register_factory( + session_service_factory.SessionServiceFactory(), + dependencies=[ServiceType.CACHE_SERVICE], + ) + + +def initialize_services(): + """ + Initialize all the services needed. + """ + from langflow.services.database import factory as database_factory + from langflow.services.cache import factory as cache_factory + from langflow.services.chat import factory as chat_factory + from langflow.services.settings import factory as settings_factory + from langflow.services.auth import factory as auth_factory + from langflow.services.task import factory as task_factory + from langflow.services.session import factory as session_service_factory # type: ignore + + service_manager.register_factory(settings_factory.SettingsServiceFactory()) + service_manager.register_factory( + auth_factory.AuthServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] + ) + service_manager.register_factory( + database_factory.DatabaseServiceFactory(), + dependencies=[ServiceType.SETTINGS_SERVICE], + ) + service_manager.register_factory( + cache_factory.CacheServiceFactory(), dependencies=[ServiceType.SETTINGS_SERVICE] + ) + service_manager.register_factory(chat_factory.ChatServiceFactory()) + + service_manager.register_factory(task_factory.TaskServiceFactory()) + + service_manager.register_factory( + session_service_factory.SessionServiceFactory(), + dependencies=[ServiceType.CACHE_SERVICE], + ) + # Test cache connection + service_manager.get(ServiceType.CACHE_SERVICE) + # Test database connection + service_manager.get(ServiceType.DATABASE_SERVICE) + # Setup the superuser + initialize_database() + session = next(get_session()) + 
setup_superuser(service_manager.get(ServiceType.SETTINGS_SERVICE), session) diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index dddbe1a0b..6b05b6554 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -140,13 +140,16 @@ class FrontendNode(BaseModel): @staticmethod def handle_dict_type(field: TemplateField, _type: str) -> str: """Handles 'dict' type by replacing it with 'code' or 'file' based on the field name.""" - if "dict" in _type.lower(): - if field.name == "dict_": - field.field_type = "file" - field.suffixes = [".json", ".yaml", ".yml"] - field.file_types = ["json", "yaml", "yml"] - else: - field.field_type = "code" + if "dict" in _type.lower() and field.name == "dict_": + field.field_type = "file" + field.suffixes = [".json", ".yaml", ".yml"] + field.file_types = ["json", "yaml", "yml"] + elif ( + _type.startswith("Dict") + or _type.startswith("Mapping") + or _type.startswith("dict") + ): + field.field_type = "dict" return _type @staticmethod @@ -240,20 +243,6 @@ class FrontendNode(BaseModel): "description", } - @staticmethod - def replace_dict_with_code_or_file( - field: TemplateField, _type: str, key: str - ) -> str: - """Replaces 'dict' type with 'code' or 'file'.""" - if "dict" in _type.lower(): - if key == "dict_": - field.field_type = "file" - field.suffixes = [".json", ".yaml", ".yml"] - field.file_types = ["json", "yaml", "yml"] - else: - field.field_type = "code" - return field.field_type - @staticmethod def set_field_default_value(field: TemplateField, value: dict, key: str) -> None: """Sets the field value with the default value if present.""" diff --git a/src/backend/langflow/template/frontend_node/documentloaders.py b/src/backend/langflow/template/frontend_node/documentloaders.py index d400ff502..ed5a0fa42 100644 --- a/src/backend/langflow/template/frontend_node/documentloaders.py +++ 
b/src/backend/langflow/template/frontend_node/documentloaders.py @@ -170,11 +170,11 @@ class DocumentLoaderFrontNode(FrontendNode): # add a metadata field of type dict self.template.add_field( TemplateField( - field_type="code", - required=True, + field_type="dict", + required=False, show=True, name="metadata", - value="{}", + value={}, display_name="Metadata", multiline=False, ) diff --git a/src/backend/langflow/template/frontend_node/embeddings.py b/src/backend/langflow/template/frontend_node/embeddings.py index 4e7e25112..665328e78 100644 --- a/src/backend/langflow/template/frontend_node/embeddings.py +++ b/src/backend/langflow/template/frontend_node/embeddings.py @@ -89,7 +89,7 @@ class EmbeddingFrontendNode(FrontendNode): if field.name == "headers": field.show = False if field.name == "model_kwargs": - field.field_type = "code" + field.field_type = "dict" field.advanced = True field.show = True elif field.name in [ diff --git a/src/backend/langflow/template/frontend_node/formatter/field_formatters.py b/src/backend/langflow/template/frontend_node/formatter/field_formatters.py index 6b949f8f6..dd289f453 100644 --- a/src/backend/langflow/template/frontend_node/formatter/field_formatters.py +++ b/src/backend/langflow/template/frontend_node/formatter/field_formatters.py @@ -153,10 +153,13 @@ class DictCodeFileFormatter(FieldFormatter): key = field.name value = field.to_dict() _type = value["type"] - if "dict" in _type.lower(): - if key == "dict_": - field.field_type = "file" - field.suffixes = [".json", ".yaml", ".yml"] - field.file_types = ["json", "yaml", "yml"] - else: - field.field_type = "code" + if "dict" in _type.lower() and key == "dict_": + field.field_type = "file" + field.suffixes = [".json", ".yaml", ".yml"] + field.file_types = ["json", "yaml", "yml"] + elif ( + _type.startswith("Dict") + or _type.startswith("Mapping") + or _type.startswith("dict") + ): + field.field_type = "dict" diff --git a/src/backend/langflow/template/frontend_node/llms.py 
b/src/backend/langflow/template/frontend_node/llms.py index a6a128cfe..b8e007a27 100644 --- a/src/backend/langflow/template/frontend_node/llms.py +++ b/src/backend/langflow/template/frontend_node/llms.py @@ -1,5 +1,5 @@ -import json from typing import Optional +from langflow.services.database.models.base import orjson_dumps from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode @@ -89,7 +89,7 @@ class LLMFrontendNode(FrontendNode): if field.name == "config": field.show = True field.advanced = True - field.value = json.dumps(CTRANSFORMERS_DEFAULT_CONFIG, indent=2) + field.value = orjson_dumps(CTRANSFORMERS_DEFAULT_CONFIG, indent_2=True) @staticmethod def format_field(field: TemplateField, name: Optional[str] = None) -> None: @@ -131,7 +131,7 @@ class LLMFrontendNode(FrontendNode): if display_name := display_names_dict.get(field.name): field.display_name = display_name if field.name == "model_kwargs": - field.field_type = "code" + field.field_type = "dict" field.advanced = True field.show = True elif field.name in [ diff --git a/src/backend/langflow/template/frontend_node/prompts.py b/src/backend/langflow/template/frontend_node/prompts.py index da5d2a300..c52b1901c 100644 --- a/src/backend/langflow/template/frontend_node/prompts.py +++ b/src/backend/langflow/template/frontend_node/prompts.py @@ -15,6 +15,7 @@ from langflow.template.template.base import Template class PromptFrontendNode(FrontendNode): @staticmethod def format_field(field: TemplateField, name: Optional[str] = None) -> None: + FrontendNode.format_field(field, name) # if field.field_type == "StringPromptTemplate" # change it to str PROMPT_FIELDS = [ diff --git a/src/backend/langflow/template/frontend_node/utilities.py b/src/backend/langflow/template/frontend_node/utilities.py index df993e377..a5adb219d 100644 --- a/src/backend/langflow/template/frontend_node/utilities.py +++ b/src/backend/langflow/template/frontend_node/utilities.py @@ -1,6 
+1,6 @@ import ast -import json from typing import Optional +from langflow.services.database.models.base import orjson_dumps from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode @@ -21,5 +21,4 @@ class UtilitiesFrontendNode(FrontendNode): field.field_type = "str" if isinstance(field.value, dict): - field.field_type = "code" - field.value = json.dumps(field.value, indent=4) + field.value = orjson_dumps(field.value) diff --git a/src/backend/langflow/template/frontend_node/vectorstores.py b/src/backend/langflow/template/frontend_node/vectorstores.py index 23c293437..73e9aaaca 100644 --- a/src/backend/langflow/template/frontend_node/vectorstores.py +++ b/src/backend/langflow/template/frontend_node/vectorstores.py @@ -56,7 +56,7 @@ class VectorStoreFrontendNode(FrontendNode): # Add search_kwargs field extra_field = TemplateField( name="search_kwargs", - field_type="code", + field_type="NestedDict", required=False, placeholder="", show=True, diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index e473d855b..0c97b56a2 100644 --- a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -48,4 +48,16 @@ def python_function(text: str) -> str: return text """ -DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"] + +PYTHON_BASIC_TYPES = [str, bool, int, float, tuple, list, dict, set] +DIRECT_TYPES = [ + "str", + "bool", + "dict", + "int", + "float", + "Any", + "prompt", + "code", + "NestedDict", +] diff --git a/src/backend/langflow/utils/logger.py b/src/backend/langflow/utils/logger.py index deb0f75ca..b08621410 100644 --- a/src/backend/langflow/utils/logger.py +++ b/src/backend/langflow/utils/logger.py @@ -1,30 +1,69 @@ -import logging +from typing import Optional +from loguru import logger from pathlib import Path - from rich.logging import RichHandler - -logger = logging.getLogger("langflow") +import os +import 
orjson +import appdirs -def configure(log_level: str = "DEBUG", log_file: Path = None): # type: ignore - log_format = "%(asctime)s - %(levelname)s - %(message)s" - log_level_value = getattr(logging, log_level.upper(), logging.INFO) +VALID_LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] - logging.basicConfig( - level=log_level_value, - format=log_format, - datefmt="[%X]", - handlers=[RichHandler(rich_tracebacks=True)], + +def serialize(record): + subset = { + "timestamp": record["time"].timestamp(), + "message": record["message"], + "level": record["level"].name, + "module": record["module"], + } + return orjson.dumps(subset) + + +def patching(record): + record["extra"]["serialized"] = serialize(record) + + +def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None): + if os.getenv("LANGFLOW_LOG_LEVEL") in VALID_LOG_LEVELS and log_level is None: + log_level = os.getenv("LANGFLOW_LOG_LEVEL") + if log_level is None: + log_level = "INFO" + # Human-readable + log_format = ( + "{time:YYYY-MM-DD HH:mm:ss} - " + "{level: <8} - {module} - {message}" ) - if log_file: - log_file = Path(log_file) - log_file.parent.mkdir(parents=True, exist_ok=True) + # log_format = log_format_dev if log_level.upper() == "DEBUG" else log_format_prod + logger.remove() # Remove default handlers + logger.patch(patching) + # Configure loguru to use RichHandler + logger.configure( + handlers=[ + { + "sink": RichHandler(rich_tracebacks=True, markup=True), + "format": log_format, + "level": log_level.upper(), + } + ] + ) - file_handler = logging.FileHandler(log_file) - file_handler.setFormatter(logging.Formatter(log_format)) - logger.addHandler(file_handler) + if not log_file: + cache_dir = Path(appdirs.user_cache_dir("langflow")) + log_file = cache_dir / "langflow.log" - logger.info(f"Logger set up with log level: {log_level_value}({log_level})") + log_file = Path(log_file) + log_file.parent.mkdir(parents=True, exist_ok=True) + + logger.add( + sink=str(log_file), + 
level=log_level.upper(), + format=log_format, + rotation="10 MB", # Log rotation based on file size + serialize=True, + ) + + logger.debug(f"Logger set up with log level: {log_level}") if log_file: logger.info(f"Log file: {log_file}") diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py index 614f04078..fce9a1a21 100644 --- a/src/backend/langflow/utils/util.py +++ b/src/backend/langflow/utils/util.py @@ -2,14 +2,13 @@ import re import inspect import importlib from functools import wraps -from typing import Optional, Dict, Any, Union +from typing import List, Optional, Dict, Any, Union -from docstring_parser import parse # type: ignore +from docstring_parser import parse from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS from langflow.utils import constants -from langflow.utils.logger import logger -from multiprocess import cpu_count # type: ignore +from langchain.schema import Document def build_template_from_function( @@ -265,6 +264,9 @@ def format_dict( _type: Union[str, type] = get_type(value) + if "BaseModel" in str(_type): + continue + _type = remove_optional_wrapper(_type) _type = check_list_type(_type, value) _type = replace_mapping_with_dict(_type) @@ -274,8 +276,6 @@ def format_dict( value["password"] = is_password_field(key) value["multiline"] = is_multiline_field(key) - replace_dict_type_with_code(value) - if key == "dict_": set_dict_file_attributes(value) @@ -406,14 +406,6 @@ def is_multiline_field(key: str) -> bool: } -def replace_dict_type_with_code(value: Dict[str, Any]) -> None: - """ - Replaces the type value with 'code' if the type is a dict. - """ - if "dict" in value["type"].lower(): - value["type"] = "code" - - def set_dict_file_attributes(value: Dict[str, Any]) -> None: """ Sets the file attributes for the 'dict_' key. 
@@ -458,8 +450,10 @@ def add_options_to_field( value["value"] = options_map[class_name][0] -def get_number_of_workers(workers=None): - if workers == -1 or workers is None: - workers = (cpu_count() * 2) + 1 - logger.debug(f"Number of workers: {workers}") - return workers +def build_loader_repr_from_documents(documents: List[Document]) -> str: + if documents: + avg_length = sum(len(doc.page_content) for doc in documents) / len(documents) + return f"""{len(documents)} documents + \nAvg. Document Length (characters): {int(avg_length)} + Documents: {documents[:3]}...""" + return "0 documents" diff --git a/src/backend/langflow/worker.py b/src/backend/langflow/worker.py new file mode 100644 index 000000000..2eeba14a5 --- /dev/null +++ b/src/backend/langflow/worker.py @@ -0,0 +1,62 @@ +from langflow.core.celery_app import celery_app +from typing import Any, Dict, Optional +from typing import TYPE_CHECKING + +from celery.exceptions import SoftTimeLimitExceeded # type: ignore +from langflow.processing.process import ( + Result, + generate_result, + process_inputs, +) +from langflow.services.manager import initialize_session_service +from langflow.services.getters import get_session_service + +if TYPE_CHECKING: + from langflow.graph.vertex.base import Vertex + + +@celery_app.task(acks_late=True) +def test_celery(word: str) -> str: + return f"test task return {word}" + + +@celery_app.task(bind=True, soft_time_limit=30, max_retries=3) +def build_vertex(self, vertex: "Vertex") -> "Vertex": + """ + Build a vertex + """ + try: + vertex.task_id = self.request.id + vertex.build() + return vertex + except SoftTimeLimitExceeded as e: + raise self.retry( + exc=SoftTimeLimitExceeded("Task took too long"), countdown=2 + ) from e + + +@celery_app.task(acks_late=True) +def process_graph_cached_task( + data_graph: Dict[str, Any], + inputs: Optional[dict] = None, + clear_cache=False, + session_id=None, +) -> Dict[str, Any]: + initialize_session_service() + session_service = 
get_session_service() + if clear_cache: + session_service.clear_session(session_id) + if session_id is None: + session_id = session_service.generate_key( + session_id=session_id, data_graph=data_graph + ) + # Load the graph using SessionService + graph, artifacts = session_service.load_session(session_id, data_graph) + built_object = graph.build() + processed_inputs = process_inputs(inputs, artifacts) + result = generate_result(built_object, processed_inputs) + # langchain_object is now updated with the new memory + # we need to update the cache with the updated langchain_object + session_service.update_session(session_id, (graph, artifacts)) + + return Result(result=result, session_id=session_id).dict() diff --git a/src/frontend/.github/workflows/playwright.yml b/src/frontend/.github/workflows/playwright.yml new file mode 100644 index 000000000..90b6b700d --- /dev/null +++ b/src/frontend/.github/workflows/playwright.yml @@ -0,0 +1,27 @@ +name: Playwright Tests +on: + push: + branches: [ main, master ] + pull_request: + branches: [ main, master ] +jobs: + test: + timeout-minutes: 60 + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 18 + - name: Install dependencies + run: npm ci + - name: Install Playwright Browsers + run: npx playwright install --with-deps + - name: Run Playwright tests + run: npx playwright test + - uses: actions/upload-artifact@v3 + if: always() + with: + name: playwright-report + path: playwright-report/ + retention-days: 30 diff --git a/src/frontend/.gitignore b/src/frontend/.gitignore index 4d29575de..9ad9e7fb4 100644 --- a/src/frontend/.gitignore +++ b/src/frontend/.gitignore @@ -21,3 +21,6 @@ npm-debug.log* yarn-debug.log* yarn-error.log* +/test-results/ +/playwright-report/ +/playwright/.cache/ diff --git a/src/frontend/Dockerfile b/src/frontend/Dockerfile index 010811e7b..024eab562 100644 --- a/src/frontend/Dockerfile +++ b/src/frontend/Dockerfile @@ -1,10 +1,16 @@ 
-FROM node:14-alpine as frontend_build -ARG BACKEND +FROM node:20-alpine as frontend_build +ARG BACKEND_URL WORKDIR /app -COPY . /app + +COPY ./package.json ./package-lock.json ./tsconfig.json ./vite.config.ts ./index.html ./tailwind.config.js ./postcss.config.js ./prettier.config.js /app/ RUN npm install +COPY ./src /app/src RUN npm run build FROM nginx COPY --from=frontend_build /app/build/ /usr/share/nginx/html -COPY /nginx.conf /etc/nginx/conf.d/default.conf \ No newline at end of file +COPY /nginx.conf /etc/nginx/conf.d/default.conf +COPY start-nginx.sh /start-nginx.sh +RUN chmod +x /start-nginx.sh +ENV BACKEND_URL=$BACKEND_URL +CMD ["/start-nginx.sh"] \ No newline at end of file diff --git a/src/frontend/harFiles/langflow.har b/src/frontend/harFiles/langflow.har new file mode 100644 index 000000000..2f73c3515 --- /dev/null +++ b/src/frontend/harFiles/langflow.har @@ -0,0 +1,229 @@ +{ + "log": { + "version": "1.2", + "creator": { + "name": "Playwright", + "version": "1.37.1" + }, + "browser": { + "name": "chromium", + "version": "116.0.5845.82" + }, + "entries": [ + { + "startedDateTime": "2023-08-31T14:55:35.502Z", + "time": 0.727, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/auto_login", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": 
"\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "227" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Thu, 31 Aug 2023 14:55:34 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "{\"access_token\":\"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk\",\"refresh_token\":null,\"token_type\":\"bearer\"}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.727 } + }, + { + "startedDateTime": "2023-08-31T14:55:35.586Z", + "time": 0.719, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/flows/", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": 
"access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk; refresh_token=auto" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "2" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Thu, 31 Aug 2023 14:55:34 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "[]" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.719 } + }, + { + "startedDateTime": "2023-08-31T14:55:35.586Z", + "time": 1.031, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/all", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer 
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk; refresh_token=auto" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" }, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "251307" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Thu, 31 Aug 2023 14:55:34 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": 
"{\"chains\":{\"ConversationChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"_type\":\"default\"},\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLMOutputParser\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"history\",\"input\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. 
If the AI does not know the answer to a question, it truthfully says it does not know.\\n\\nCurrent conversation:\\n{history}\\nHuman: {input}\\nAI:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"input\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"llm_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"response\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_final_only\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_final_only\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationChain\"},\"description\":\"Chain 
to have a conversation and load context from memory.\",\"base_classes\":[\"Chain\",\"LLMChain\",\"ConversationChain\",\"function\"],\"display_name\":\"ConversationChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"ConversationalRetrievalChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callbacks\",\"list\":false},\"condense_question_llm\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"condense_question_llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"condense_question_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"chat_history\",\"question\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.\\n\\nChat History:\\n{chat_history}\\nFollow Up Input: {question}\\nStandalone 
question:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"condense_question_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"chain_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"combine_docs_chain_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"combine_docs_chain_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"display_name\":\"Return source 
documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationalRetrievalChain\"},\"description\":\"Convenience method to load chain from LLM and retriever.\",\"base_classes\":[\"Chain\",\"BaseConversationalRetrievalChain\",\"ConversationalRetrievalChain\",\"function\"],\"display_name\":\"ConversationalRetrievalChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/chat_vector_db\",\"beta\":false,\"error\":null},\"LLMChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLMOutputParser\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm_kwargs\":{\"required\":false,\"placeholder
\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_final_only\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_final_only\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"LLMChain\"},\"description\":\"Chain to run queries against LLMs.\",\"base_classes\":[\"Chain\",\"LLMChain\",\"function\"],\"display_name\":\"LLMChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/foundational/llm_chain\",\"beta\":false,\"error\":null},\"LLMCheckerChain\":{\"template\":{\"check_assertions_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"assertions\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"Here is a bullet point list of assertions:\\n{assertions}\\nFor each assertion, determine whether it is true or false. 
If it is false, explain why.\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"check_assertions_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"create_draft_answer_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"{question}\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"create_draft_answer_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"list_assertions_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"statement\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"Here is a statement:\\n{statement}\\nMake a bullet point list of the assumptions you made when producing the above statement.\\n\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"list_assertions_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"revised_answer_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"checked_assertions\",\"question\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"{checked_assertions}\\n\\nQuestion: In light of the above assertions and checks, how would you answer the question 
'{question}'?\\n\\nAnswer:\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"revised_answer_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"_type\":\"LLMCheckerChain\"},\"description\":\"\",\"base_classes\":[\"Chain\",\"LLMCheckerChain\",\"function\"],\"display_name\":\"LLMCheckerChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_checker\",\"beta\":false,\"error\":null},\"LLMMathChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"llm_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"LLMChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"Translate a math problem into a expression that can be executed using Python's numexpr library. 
Use the output of running this code to answer the question.\\n\\nQuestion: ${{Question with math problem.}}\\n```text\\n${{single line mathematical expression that solves the problem}}\\n```\\n...numexpr.evaluate(text)...\\n```output\\n${{Output of running the code}}\\n```\\nAnswer: ${{Answer}}\\n\\nBegin.\\n\\nQuestion: What is 37593 * 67?\\n```text\\n37593 * 67\\n```\\n...numexpr.evaluate(\\\"37593 * 67\\\")...\\n```output\\n2518731\\n```\\nAnswer: 2518731\\n\\nQuestion: 37593^(1/5)\\n```text\\n37593**(1/5)\\n```\\n...numexpr.evaluate(\\\"37593**(1/5)\\\")...\\n```output\\n8.222831614237718\\n```\\nAnswer: 8.222831614237718\\n\\nQuestion: {question}\\n\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"question\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"answer\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":fa
lse},\"_type\":\"LLMMathChain\"},\"description\":\"Chain that interprets a prompt and executes python code to do math.\",\"base_classes\":[\"Chain\",\"LLMMathChain\",\"function\"],\"display_name\":\"LLMMathChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/additional/llm_math\",\"beta\":false,\"error\":null},\"RetrievalQA\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"combine_documents_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCombineDocumentsChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"input_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"query\",\"password\":false,\"name\":\"input_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"output_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"result\",\"password\":false,\"name\":\"output_key\",\"advanced\":true,\"dynamic\":f
alse,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"RetrievalQA\"},\"description\":\"Chain for question-answering against an index.\",\"base_classes\":[\"Chain\",\"BaseRetrievalQA\",\"RetrievalQA\",\"function\"],\"display_name\":\"RetrievalQA\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/chains/popular/vector_db_qa\",\"beta\":false,\"error\":null},\"RetrievalQAWithSourcesChain\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"combine_documents_chain\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"combine_documents_chain\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCombineDocumentsChain\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"ret
riever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"answer_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"answer\",\"password\":false,\"name\":\"answer_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_docs_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"docs\",\"password\":false,\"name\":\"input_docs_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_tokens_limit\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":3375,\"password\":false,\"name\":\"max_tokens_limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"question_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"question\",\"password\":false,\"name\":\"question_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"reduce_k_below_max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"reduce_k_below_max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"return_source_documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"return_source_documents\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"sources_answer_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"sources\",\"password\":false,\"name\":\"sources_answer_key\",\"advanced\":
false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"RetrievalQAWithSourcesChain\"},\"description\":\"Question-answering with sources over an index.\",\"base_classes\":[\"Chain\",\"RetrievalQAWithSourcesChain\",\"BaseQAWithSourcesChain\",\"function\"],\"display_name\":\"RetrievalQAWithSourcesChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SQLDatabaseChain\":{\"template\":{\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"_type\":\"SQLDatabaseChain\"},\"description\":\"\",\"base_classes\":[\"Chain\",\"SQLDatabaseChain\",\"function\"],\"display_name\":\"SQLDatabaseChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"CombineDocsChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":
\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"chain_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"stuff\",\"password\":false,\"options\":[\"stuff\",\"map_reduce\",\"map_rerank\",\"refine\"],\"name\":\"chain_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"load_qa_chain\"},\"description\":\"Load question answering chain.\",\"base_classes\":[\"BaseCombineDocumentsChain\",\"function\"],\"display_name\":\"CombineDocsChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SeriesCharacterChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"character\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"character\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"series\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"series\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SeriesCharacterChain\"},\"description\":\"SeriesCharacterChain is a chain you can use to have a conversation with a character from a 
series.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"Chain\",\"ConversationChain\",\"SeriesCharacterChain\",\"function\"],\"display_name\":\"SeriesCharacterChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"MidJourneyPromptChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"_type\":\"MidJourneyPromptChain\"},\"description\":\"MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"Chain\",\"ConversationChain\",\"MidJourneyPromptChain\"],\"display_name\":\"MidJourneyPromptChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"TimeTravelGuideChain\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"_type\":\"TimeTravelGuideChain\"},\"description\":\"Time travel guide 
chain.\",\"base_classes\":[\"LLMChain\",\"BaseCustomChain\",\"TimeTravelGuideChain\",\"Chain\",\"ConversationChain\"],\"display_name\":\"TimeTravelGuideChain\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PromptRunner\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\n\\n\\nclass PromptRunner(CustomComponent):\\n display_name: str = \\\"Prompt Runner\\\"\\n description: str = \\\"Run a Chain with the given PromptTemplate\\\"\\n beta = True\\n field_config = {\\n \\\"llm\\\": {\\\"display_name\\\": \\\"LLM\\\"},\\n \\\"prompt\\\": {\\n \\\"display_name\\\": \\\"Prompt Template\\\",\\n \\\"info\\\": \\\"Make sure the prompt has all variables filled.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"inputs\\\": {\\\"field_type\\\": \\\"code\\\"},\\n }\\n\\n def build(\\n self,\\n llm: BaseLLM,\\n prompt: PromptTemplate,\\n ) -> Document:\\n chain = prompt | llm\\n # The input is an empty dict because the prompt is already filled\\n result = chain.invoke({})\\n if hasattr(result, \\\"content\\\"):\\n result = result.content\\n self.repr_value = result\\n return Document(page_content=str(result))\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"display_name\":\"Prompt Template\",\"advanced\":false,\"dynamic\":false,\"info\":\"Make sure the prompt has 
all variables filled.\",\"type\":\"PromptTemplate\",\"list\":false}},\"description\":\"Run a Chain with the given PromptTemplate\",\"base_classes\":[\"Document\"],\"display_name\":\"Prompt Runner\",\"custom_fields\":{\"llm\":null,\"prompt\":null},\"output_types\":[\"PromptRunner\"],\"documentation\":\"\",\"beta\":true,\"error\":null}},\"agents\":{\"ZeroShotAgent\":{\"template\":{\"callback_manager\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callback_manager\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseCallbackManager\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"AgentOutputParser\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseTool\",\"list\":true},\"format_instructions\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"Use the following format:\\n\\nQuestion: the input question you must answer\\nThought: you should always think about what to do\\nAction: the action to take, should be one of [{tool_names}]\\nAction Input: the input to the action\\nObservation: the result of the action\\n... 
(this Thought/Action/Action Input/Observation can repeat N times)\\nThought: I now know the final answer\\nFinal Answer: the final answer to the original input question\",\"password\":false,\"name\":\"format_instructions\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"Answer the following questions as best you can. You have access to the following tools:\",\"password\":false,\"name\":\"prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"suffix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"Begin!\\n\\nQuestion: {input}\\nThought:{agent_scratchpad}\",\"password\":false,\"name\":\"suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ZeroShotAgent\"},\"description\":\"Construct an agent from an LLM and 
tools.\",\"base_classes\":[\"ZeroShotAgent\",\"BaseSingleActionAgent\",\"Agent\",\"function\"],\"display_name\":\"ZeroShotAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent\",\"beta\":false,\"error\":null},\"JsonAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"toolkit\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseToolkit\",\"list\":false},\"_type\":\"json_agent\"},\"description\":\"Construct a json agent from an LLM and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"JsonAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/openapi\",\"beta\":false,\"error\":null},\"CSVAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".csv\"],\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"csv\"],\"file_path\":null},\"_type\":\"csv_agent\"},\"description\":\"Construct a CSV agent from a CSV and 
tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"CSVAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/toolkits/csv\",\"beta\":false,\"error\":null},\"AgentInitializer\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMemory\",\"list\":false},\"tools\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true},\"agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"zero-shot-react-description\",\"password\":false,\"options\":[\"zero-shot-react-description\",\"react-docstore\",\"self-ask-with-search\",\"conversational-react-description\",\"openai-functions\",\"openai-multi-functions\"],\"name\":\"agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"initialize_agent\"},\"description\":\"Construct a zero shot agent from an LLM and 
tools.\",\"base_classes\":[\"AgentExecutor\",\"function\"],\"display_name\":\"AgentInitializer\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/agents/agent_types/\",\"beta\":false,\"error\":null},\"VectorStoreAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstoreinfo\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstoreinfo\",\"display_name\":\"Vector Store Info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreInfo\",\"list\":false},\"_type\":\"vectorstore_agent\"},\"description\":\"Construct an agent from a Vector Store.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"VectorStoreAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreRouterAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstoreroutertoolkit\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstoreroutertoolkit\",\"display_name\":\"Vector Store Router Toolkit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreRouterToolkit\",\"list\":false},\"_type\":\"vectorstorerouter_agent\"},\"description\":\"Construct an agent from a Vector Store 
Router.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"VectorStoreRouterAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SQLAgent\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"display_name\":\"LLM\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"database_uri\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"database_uri\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"sql_agent\"},\"description\":\"Construct an SQL agent from an LLM and tools.\",\"base_classes\":[\"AgentExecutor\"],\"display_name\":\"SQLAgent\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"OpenAIConversationalAgent\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom typing import Optional\\nfrom langchain.prompts import SystemMessagePromptTemplate\\nfrom langchain.tools import Tool\\nfrom langchain.schema.memory import BaseMemory\\nfrom langchain.chat_models import ChatOpenAI\\n\\nfrom langchain.agents.agent import AgentExecutor\\nfrom langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent\\nfrom langchain.memory.token_buffer import ConversationTokenBufferMemory\\nfrom langchain.prompts.chat import MessagesPlaceholder\\nfrom langchain.agents.agent_toolkits.conversational_retrieval.openai_functions import (\\n _get_default_system_message,\\n)\\n\\n\\nclass ConversationalAgent(CustomComponent):\\n display_name: str = \\\"OpenAI Conversational Agent\\\"\\n description: str = \\\"Conversational Agent that can use OpenAI's function calling API\\\"\\n\\n def build_config(self):\\n 
openai_function_models = [\\n \\\"gpt-3.5-turbo-0613\\\",\\n \\\"gpt-3.5-turbo-16k-0613\\\",\\n \\\"gpt-4-0613\\\",\\n \\\"gpt-4-32k-0613\\\",\\n ]\\n return {\\n \\\"tools\\\": {\\\"is_list\\\": True, \\\"display_name\\\": \\\"Tools\\\"},\\n \\\"memory\\\": {\\\"display_name\\\": \\\"Memory\\\"},\\n \\\"system_message\\\": {\\\"display_name\\\": \\\"System Message\\\"},\\n \\\"max_token_limit\\\": {\\\"display_name\\\": \\\"Max Token Limit\\\"},\\n \\\"model_name\\\": {\\n \\\"display_name\\\": \\\"Model Name\\\",\\n \\\"options\\\": openai_function_models,\\n \\\"value\\\": openai_function_models[0],\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n }\\n\\n def build(\\n self,\\n model_name: str,\\n openai_api_key: str,\\n openai_api_base: str,\\n tools: Tool,\\n memory: Optional[BaseMemory] = None,\\n system_message: Optional[SystemMessagePromptTemplate] = None,\\n max_token_limit: int = 2000,\\n ) -> AgentExecutor:\\n llm = ChatOpenAI(\\n model=model_name,\\n openai_api_key=openai_api_key,\\n openai_api_base=openai_api_base,\\n )\\n if not memory:\\n memory_key = \\\"chat_history\\\"\\n memory = ConversationTokenBufferMemory(\\n memory_key=memory_key,\\n return_messages=True,\\n output_key=\\\"output\\\",\\n llm=llm,\\n max_token_limit=max_token_limit,\\n )\\n else:\\n memory_key = memory.memory_key # type: ignore\\n\\n _system_message = system_message or _get_default_system_message()\\n prompt = OpenAIFunctionsAgent.create_prompt(\\n system_message=_system_message, # type: ignore\\n extra_prompt_messages=[MessagesPlaceholder(variable_name=memory_key)],\\n )\\n agent = OpenAIFunctionsAgent(\\n llm=llm, tools=tools, prompt=prompt # type: ignore\\n )\\n return AgentExecutor(\\n agent=agent,\\n tools=tools, # type: ignore\\n memory=memory,\\n verbose=True,\\n return_intermediate_steps=True,\\n 
)\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"max_token_limit\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":2000,\"password\":false,\"name\":\"max_token_limit\",\"display_name\":\"Max Token Limit\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"memory\",\"display_name\":\"Memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMemory\",\"list\":false},\"model_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo-0613\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo-16k-0613\",\"gpt-4-0613\",\"gpt-4-32k-0613\"],\"name\":\"model_name\",\"display_name\":\"Model Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"openai_api_base\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"openai_api_base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_key\",\"display_name\":\"openai_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"system_message\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"system_message\",\"display_name\":\"System 
Message\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SystemMessagePromptTemplate\",\"list\":false},\"tools\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tools\",\"display_name\":\"Tools\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Tool\",\"list\":true}},\"description\":\"Conversational Agent that can use OpenAI's function calling API\",\"base_classes\":[\"AgentExecutor\",\"Chain\"],\"display_name\":\"OpenAI Conversational Agent\",\"custom_fields\":{\"max_token_limit\":null,\"memory\":null,\"model_name\":null,\"openai_api_base\":null,\"openai_api_key\":null,\"system_message\":null,\"tools\":null},\"output_types\":[\"OpenAIConversationalAgent\"],\"documentation\":\"\",\"beta\":true,\"error\":null}},\"prompts\":{\"ChatMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"role\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"role\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ChatMessagePromptTemplate\"},\"description\":\"Chat message prompt 
template.\",\"base_classes\":[\"ChatMessagePromptTemplate\",\"BaseStringMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"ChatMessagePromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates\",\"beta\":false,\"error\":null},\"ChatPromptTemplate\":{\"template\":{\"messages\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseMessagePromptTemplate\",\"list\":true},\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"ChatPromptTemplate\"},\"description\":\"A prompt template for chat 
models.\",\"base_classes\":[\"BasePromptTemplate\",\"ChatPromptTemplate\",\"BaseChatPromptTemplate\"],\"display_name\":\"ChatPromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null},\"HumanMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"_type\":\"HumanMessagePromptTemplate\"},\"description\":\"Human message prompt template. 
This is a message that is sent to the user.\",\"base_classes\":[\"HumanMessagePromptTemplate\",\"BaseStringMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"HumanMessagePromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null},\"PromptTemplate\":{\"template\":{\"output_parser\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"output_parser\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseOutputParser\",\"list\":false},\"input_variables\":{\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"input_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"partial_variables\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"partial_variables\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"template\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"template\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"template_format\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"f-string\",\"password\":false,\"name\":\"template_format\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"validate_template\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"validate_template\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PromptTemplate\"},\"description\":\"A prompt template for a language 
model.\",\"base_classes\":[\"BasePromptTemplate\",\"StringPromptTemplate\",\"PromptTemplate\"],\"display_name\":\"PromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/\",\"beta\":false,\"error\":null},\"SystemMessagePromptTemplate\":{\"template\":{\"additional_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"additional_kwargs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"prompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\nYou are a helpful assistant that talks casually about life in general.\\nYou are a good listener and you can talk about anything.\\n\",\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"prompt\",\"list\":false},\"_type\":\"SystemMessagePromptTemplate\"},\"description\":\"System message prompt template.\",\"base_classes\":[\"SystemMessagePromptTemplate\",\"BaseStringMessagePromptTemplate\",\"BaseMessagePromptTemplate\"],\"display_name\":\"SystemMessagePromptTemplate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts\",\"beta\":false,\"error\":null}},\"llms\":{\"Anthropic\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"count_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"count_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callable[[str], 
int]\",\"list\":false},\"AI_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"AI_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"HUMAN_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"HUMAN_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"anthropic_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"anthropic_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"anthropic_api_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"anthropic_api_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"max_tokens_to_sample\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":256,\"password\":false,\"name\":\"max_tokens_to_sample\",\"advanced\":false,\"dy
namic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"claude-2\",\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",
\"type\":\"bool\",\"list\":false},\"_type\":\"Anthropic\"},\"description\":\"Anthropic large language models.\",\"base_classes\":[\"_AnthropicCommon\",\"Anthropic\",\"BaseLanguageModel\",\"BaseLLM\",\"LLM\"],\"display_name\":\"Anthropic\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Cohere\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cohere_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"frequency_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"
show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.75,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"truncate\":{\"required\":false,\"placeholder\":\"\",\"show\":false,
\"multiline\":false,\"password\":false,\"name\":\"truncate\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"Cohere\"},\"description\":\"Cohere large language models.\",\"base_classes\":[\"Cohere\",\"BaseLanguageModel\",\"BaseLLM\",\"LLM\"],\"display_name\":\"Cohere\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere\",\"beta\":false,\"error\":null},\"CTransformers\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"config\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{\\n \\\"top_k\\\": 40,\\n \\\"top_p\\\": 0.95,\\n \\\"temperature\\\": 0.8,\\n \\\"repetition_penalty\\\": 1.1,\\n \\\"last_n_tokens\\\": 64,\\n \\\"seed\\\": -1,\\n \\\"max_new_tokens\\\": 256,\\n \\\"stop\\\": null,\\n \\\"stream\\\": false,\\n \\\"reset\\\": true,\\n \\\"batch_size\\\": 8,\\n \\\"threads\\\": -1,\\n \\\"context_length\\\": -1,\\n \\\"gpu_layers\\\": 
0\\n}\",\"password\":false,\"name\":\"config\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"lib\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"lib\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_file\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"CTransformers\"},\"description\":\"C Transformers LLM 
models.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\",\"CTransformers\",\"LLM\"],\"display_name\":\"CTransformers\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers\",\"beta\":false,\"error\":null},\"HuggingFaceHub\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"huggingfacehub_api_token\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"huggingfacehub_api_token\",\"display_name\":\"HuggingFace Hub API 
Token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"repo_id\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt2\",\"password\":false,\"name\":\"repo_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"task\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-generation\",\"password\":false,\"options\":[\"text-generation\",\"text2text-generation\",\"summarization\"],\"name\":\"task\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"HuggingFaceHub\"},\"description\":\"HuggingFaceHub 
models.\",\"base_classes\":[\"HuggingFaceHub\",\"BaseLanguageModel\",\"BaseLLM\",\"LLM\"],\"display_name\":\"HuggingFaceHub\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub\",\"beta\":false,\"error\":null},\"LlamaCpp\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"echo\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"echo\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"f16_kv\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"f16_kv\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"last_n_tokens_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":64,\"password\":false,\"name\":\"last_n_tokens_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"logits_all\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"logits_all\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"logprobs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,
\"multiline\":false,\"password\":false,\"name\":\"logprobs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"lora_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"lora_base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"lora_path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"lora_path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"n_batch\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8,\"password\":false,\"name\":\"n_batch\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_ctx\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":512,\"password\":false,\"name\":\"n_ctx\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_gpu_layers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":fa
lse,\"name\":\"n_gpu_layers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_parts\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":-1,\"password\":false,\"name\":\"n_parts\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"n_threads\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"n_threads\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"repeat_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1.1,\"password\":false,\"name\":\"repeat_penalty\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"rope_freq_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10000.0,\"password\":false,\"name\":\"rope_freq_base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"rope_freq_scale\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1.0,\"password\":false,\"name\":\"rope_freq_scale\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"seed\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":-1,\"password\":false,\"name\":\"seed\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"streaming\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"suffix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multil
ine\":true,\"password\":false,\"name\":\"suffix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.8,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"use_mlock\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"use_mlock\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"use_mmap\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"use_mmap\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"vocab_only\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"vocab_only\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"LlamaCpp\"},\"description\":\"llama.cpp 
model.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\",\"LlamaCpp\",\"LLM\"],\"display_name\":\"LlamaCpp\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp\",\"beta\":false,\"error\":null},\"OpenAI\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":20,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"best_of\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"best_of\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"frequency_penalty\":{\"required\":false,
\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"frequency_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"logit_bias\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"logit_bias\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":256,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-davinci-003\",\"password\":false,\"options\":[\"text-davinci-003\",\"text-davinci-002\",\"text-curie-001\",\"text-babbage-001\",\"text-ada-001\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\"
:false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"presence_penalty\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":0,\"password\":false,\"name\":\"presence_penalty\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"OpenAI\"},\"description\":\"OpenAI large language 
models.\",\"base_classes\":[\"OpenAI\",\"BaseLanguageModel\",\"BaseLLM\",\"BaseOpenAI\"],\"display_name\":\"OpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai\",\"beta\":false,\"error\":null},\"VertexAI\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"list\":false},\"credentials\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"location\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_output_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":128,\"password\":false,\"name\":\"max_output_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false
,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-bison\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"project\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_parallelism\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"request_parallelism\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,
\"info\":\"\",\"type\":\"float\",\"list\":false},\"tuned_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"tuned_model_name\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"VertexAI\"},\"description\":\"Google Vertex AI large language models.\",\"base_classes\":[\"BaseLanguageModel\",\"BaseLLM\",\"VertexAI\",\"_VertexAICommon\",\"LLM\"],\"display_name\":\"VertexAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/llms/integrations/google_vertex_ai_palm\",\"beta\":false,\"error\":null},\"ChatAnthropic\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"count_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"count_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Callable[[str], 
int]\",\"list\":false},\"AI_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"AI_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"HUMAN_PROMPT\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"HUMAN_PROMPT\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"anthropic_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"anthropic_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"anthropic_api_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"anthropic_api_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"default_request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"default_request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"max_tokens_to_sample\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":256,\"password\":false,\"name\":\"max_tokens_to_sample\",\"advanced\":false,\"dy
namic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"claude-2\",\"password\":false,\"name\":\"model\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"top_p\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",
\"type\":\"bool\",\"list\":false},\"_type\":\"ChatAnthropic\"},\"description\":\"`Anthropic` chat large language models.\",\"base_classes\":[\"_AnthropicCommon\",\"BaseChatModel\",\"BaseLanguageModel\",\"ChatAnthropic\",\"BaseLLM\"],\"display_name\":\"ChatAnthropic\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic\",\"beta\":false,\"error\":null},\"ChatOpenAI\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"max_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"
\",\"type\":\"code\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"gpt-3.5-turbo-0613\",\"password\":false,\"options\":[\"gpt-3.5-turbo-0613\",\"gpt-3.5-turbo\",\"gpt-3.5-turbo-16k-0613\",\"gpt-3.5-turbo-16k\",\"gpt-4-0613\",\"gpt-4-32k-0613\",\"gpt-4\",\"gpt-4-32k\"],\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"n\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":1,\"password\":false,\"name\":\"n\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":false,\"dynamic\":false,\"info\":\"\\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\\n\\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\\n\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI 
Proxy\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"streaming\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"streaming\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.7,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatOpenAI\"},\"description\":\"`OpenAI` Chat large language models 
API.\",\"base_classes\":[\"ChatOpenAI\",\"BaseChatModel\",\"BaseLanguageModel\",\"BaseLLM\"],\"display_name\":\"ChatOpenAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai\",\"beta\":false,\"error\":null},\"ChatVertexAI\":{\"template\":{\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":true},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"list\":false},\"credentials\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"location\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_output_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":128,\"password\":false,\"name\":\"max_output_tokens\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":false,\"dy
namic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat-bison\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"project\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"project\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_parallelism\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"request_parallelism\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"d
ynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"verbose\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"verbose\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ChatVertexAI\"},\"description\":\"`Vertex AI` Chat large language models API.\",\"base_classes\":[\"BaseChatModel\",\"BaseLanguageModel\",\"_VertexAICommon\",\"ChatVertexAI\",\"BaseLLM\"],\"display_name\":\"ChatVertexAI\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/google_vertex_ai_palm\",\"beta\":false,\"error\":null}},\"memories\":{\"ConversationBufferMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is 
available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationBufferMemory\"},\"description\":\"Buffer for storing conversation memory.\",\"base_classes\":[\"BaseChatMemory\",\"ConversationBufferMemory\",\"BaseMemory\"],\"display_name\":\"ConversationBufferMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer\",\"beta\":false,\"error\":null},\"ConversationBufferWindowMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\
"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationBufferWindowMemory\"},\"description\":\"Buffer for storing conversation memory inside a limited size window.\",\"base_classes\":[\"BaseChatMemory\",\"ConversationBufferWindowMemory\",\"BaseMemory\"],\"display_name\":\"ConversationBufferWindowMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/buffer_window\",\"beta\":false,\"error\":null},\"ConversationEntityMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"entity_extraction_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"entity_store\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_store\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseEntityStore\",\"list\":false},\"entity_summarization_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_summarization_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":fal
se},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chat_history_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"history\",\"password\":false,\"name\":\"chat_history_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"entity_cache\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_cache\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationEntityMemory\"},\"description\":\"Entity extractor & summarizer memory.\",\"base_classes\":[\"BaseChatMemory\",\"BaseMemory\",\"ConversationEntityMemory\"],\"display_name\":\"ConversationEntityMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/entity_memory_with_sqlite\",\"beta\":false,\"error\":null},\"ConversationKGMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"entity_extraction_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"entity_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"kg\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"kg\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"NetworkxEntityGraph\",\"list\":false},\"knowledge_extraction_prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"knowledge_extraction_prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"summary_message_cls\":{\"required\":false,\"p
laceholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[langchain.schema.messages.BaseMessage]\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"display_name\":\"Memory Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationKGMemory\"},\"description\":\"Knowledge graph conversation memory.\",\"base_classes\":[\"BaseChatMemory\",\"BaseMemory\",\"ConversationKGMemory\"],\"display_name\":\"ConversationKGMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/kg\",\"beta\":false,\"error\":null},\"ConversationSummaryMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BasePromptTemplate\",\"list\":false},\"summary_message_cls\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"summary_message_cls\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[langchain.schema.messages.BaseMessage]\",\"list\":false},\"ai_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"AI\",\"password\":false,\"name\":\"ai_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"buffer\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\"
:\"\",\"password\":false,\"name\":\"buffer\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"human_prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"Human\",\"password\":false,\"name\":\"human_prefix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"ConversationSummaryMemory\"},\"description\":\"Conversation summarizer to chat memory.\",\"base_classes\":[\"ConversationSummaryMemory\",\"BaseChatMemory\",\"BaseMemory\",\"SummarizerMixin\"],\"display_name\":\"ConversationSummaryMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/summary\",\"beta\":false,\"error\":null},\"MongoDBChatMessageHistory\":{\"template\":{\"collection_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"connection_string\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"MongoDB connection string (e.g mongodb://mongo_user:password123@mongo:27017)\",\"type\":\"str\",\"list\":false},\"database_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"database_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"session_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"MongoDBChatMessageHistory\"},\"description\":\"Memory store with 
MongoDB\",\"base_classes\":[\"MongoDBChatMessageHistory\",\"BaseChatMessageHistory\"],\"display_name\":\"MongoDBChatMessageHistory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/mongodb_chat_message_history\",\"beta\":false,\"error\":null},\"MotorheadMemory\":{\"template\":{\"chat_memory\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"chat_memory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseChatMessageHistory\",\"list\":false},\"api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"client_id\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"context\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"context\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"output_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"output_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be 
used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\"type\":\"str\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"session_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"https://api.getmetal.io/v1/motorhead\",\"password\":false,\"name\":\"url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"MotorheadMemory\"},\"description\":\"Chat message memory backed by Motorhead 
service.\",\"base_classes\":[\"BaseChatMemory\",\"MotorheadMemory\",\"BaseMemory\"],\"display_name\":\"MotorheadMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/integrations/memory/motorhead_memory\",\"beta\":false,\"error\":null},\"PostgresChatMessageHistory\":{\"template\":{\"connection_string\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"postgresql://postgres:mypassword@localhost/chat_history\",\"password\":false,\"name\":\"connection_string\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"session_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"session_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"table_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"message_store\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PostgresChatMessageHistory\"},\"description\":\"Memory store with 
Postgres\",\"base_classes\":[\"PostgresChatMessageHistory\",\"BaseChatMessageHistory\"],\"display_name\":\"PostgresChatMessageHistory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/integrations/postgres_chat_message_history\",\"beta\":false,\"error\":null},\"VectorStoreRetrieverMemory\":{\"template\":{\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreRetriever\",\"list\":false},\"exclude_input_keys\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"exclude_input_keys\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"input_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"input_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"The variable to be used as Chat Input when more than one variable is available.\",\"type\":\"str\",\"list\":false},\"memory_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"chat_history\",\"password\":false,\"name\":\"memory_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_docs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"return_docs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"return_messages\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"return_messages\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"VectorStoreRetrieverMemory\"},\"description\":\"VectorStoreRetriever-backed 
memory.\",\"base_classes\":[\"BaseMemory\",\"VectorStoreRetrieverMemory\"],\"display_name\":\"VectorStoreRetrieverMemory\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/memory/how_to/vectorstore_retriever_memory\",\"beta\":false,\"error\":null}},\"tools\":{\"Calculator\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"_type\":\"Calculator\"},\"description\":\"Useful for when you need to answer questions about math.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Calculator\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"News API\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"news_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"news_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"News API\"},\"description\":\"Use this when you want to get information about the top headlines of current news stories. 
The input should be a question in natural language that this API can answer.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"News API\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"TMDB API\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"tmdb_bearer_token\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"tmdb_bearer_token\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"TMDB API\"},\"description\":\"Useful for when you want to get information from The Movie Database. The input should be a question in natural language that this API can answer.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"TMDB API\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Podcast API\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"listen_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"listen_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Podcast API\"},\"description\":\"Use the Listen Notes Podcast API to search all podcasts or episodes. 
The input should be a question in natural language that this API can answer.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Podcast API\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Search\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"serpapi_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Search\"},\"description\":\"A search engine. Useful for when you need to answer questions about current events. Input should be a search query.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Search\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Tool\":{\"template\":{\"func\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"func\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"function\",\"list\":false},\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_direct\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":
\"Tool\"},\"description\":\"Converts a chain, agent or function into a tool.\",\"base_classes\":[\"Tool\",\"BaseTool\"],\"display_name\":\"Tool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonFunctionTool\":{\"template\":{\"code\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\",\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"return_direct\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"return_direct\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"PythonFunctionTool\"},\"description\":\"Python function to be executed.\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"PythonFunctionTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonFunction\":{\"template\":{\"code\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"\\ndef python_function(text: str) -> str:\\n \\\"\\\"\\\"This is a default python function that returns the input text\\\"\\\"\\\"\\n return 
text\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"PythonFunction\"},\"description\":\"Python function to be executed.\",\"base_classes\":[\"function\"],\"display_name\":\"PythonFunction\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"JsonSpec\":{\"template\":{\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\",\".yaml\",\".yml\"],\"password\":false,\"name\":\"path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\",\"yaml\",\"yml\"],\"file_path\":null},\"max_value_length\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"max_value_length\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"_type\":\"JsonSpec\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonSpec\"],\"display_name\":\"JsonSpec\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"BingSearchRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BingSearchAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required
\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"BingSearchRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BingSearchRun\",\"BaseTool\"],\"display_name\":\"BingSearchRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSearchResults\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"GoogleSearchAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":f
alse,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"num_results\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":4,\"password\":false,\"name\":\"num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GoogleSearchResults\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"GoogleSearchResults\"],\"display_name\":\"GoogleSearchResults\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSearchRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"GoogleSearchAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanc
ed\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GoogleSearchRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"GoogleSearchRun\"],\"display_name\":\"GoogleSearchRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSerperRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"GoogleSerperAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Goo
gleSerperRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"GoogleSerperRun\",\"BaseTool\"],\"display_name\":\"GoogleSerperRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"InfoSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"InfoSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"BaseTool\",\"InfoSQLDatabaseTool\"],\"display_name\":\"InfoSQLDatabaseTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"JsonGetValueTool\":{\"template\":{\"args_
schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"spec\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"JsonSpec\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"JsonGetValueTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"JsonGetValueTool\",\"BaseTool\"],\"display_name\":\"JsonGetValueTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"JsonListKeysTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\
":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"spec\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"JsonSpec\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"JsonListKeysTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"JsonListKeysTool\"],\"display_name\":\"JsonListKeysTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"ListSQLDatabaseTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynami
c\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ListSQLDatabaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseSQLDatabaseTool\",\"BaseTool\",\"ListSQLDatabaseTool\"],\"display_name\":\"ListSQLDatabaseTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonAstREPLTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"globals\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"globals\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"t
ype\":\"bool\",\"list\":false},\"locals\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"locals\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"sanitize_input\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"sanitize_input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PythonAstREPLTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"PythonAstREPLTool\"],\"display_name\":\"PythonAstREPLTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"PythonREPLTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"python_repl\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"python_repl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PythonREPL\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"sh
ow\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"sanitize_input\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"sanitize_input\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PythonREPLTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"PythonREPLTool\",\"BaseTool\"],\"display_name\":\"PythonREPLTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"QuerySQLDataBaseTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"db\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"db\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"SQLDatabase\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handl
e_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"QuerySQLDataBaseTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"QuerySQLDataBaseTool\",\"BaseSQLDatabaseTool\",\"BaseTool\"],\"display_name\":\"QuerySQLDataBaseTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsDeleteTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false
,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsDeleteTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsDeleteTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsDeleteTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsGetTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"l
ist\":false},\"_type\":\"RequestsGetTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseRequestsTool\",\"BaseTool\",\"RequestsGetTool\"],\"display_name\":\"RequestsGetTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsPatchTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsPatchTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseRequestsTool\",\"BaseTool\",\"RequestsPatchTool\"],\"display_name\":\"RequestsPatchTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",
\"beta\":false,\"error\":null},\"RequestsPostTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsPostTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseRequestsTool\",\"BaseTool\",\"RequestsPostTool\"],\"display_name\":\"RequestsPostTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"RequestsPutTool\":{\"template\":{\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"T
ype[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RequestsPutTool\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"RequestsPutTool\",\"BaseRequestsTool\",\"BaseTool\"],\"display_name\":\"RequestsPutTool\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WikipediaQueryRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"WikipediaAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"l
ist\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_error\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"WikipediaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"WikipediaQueryRun\",\"BaseTool\"],\"display_name\":\"WikipediaQueryRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WolframAlphaQueryRun\":{\"template\":{\"api_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"WolframAlphaAPIWrapper\",\"list\":false},\"args_schema\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"args_schema\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Type[pydantic.main.BaseModel]\",\"list\":false},\"callbacks\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"callbacks\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"langchain.callbacks.base.BaseCallbackHandler\",\"list\":false},\"handle_tool_err
or\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"handle_tool_error\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"tags\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tags\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"WolframAlphaQueryRun\"},\"description\":\"\",\"base_classes\":[\"Tool\",\"BaseTool\",\"BaseTool\",\"WolframAlphaQueryRun\"],\"display_name\":\"WolframAlphaQueryRun\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null}},\"toolkits\":{\"JsonToolkit\":{\"template\":{\"spec\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"spec\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"JsonSpec\",\"list\":false},\"_type\":\"JsonToolkit\"},\"description\":\"Toolkit for interacting with a JSON 
spec.\",\"base_classes\":[\"BaseToolkit\",\"JsonToolkit\",\"Tool\"],\"display_name\":\"JsonToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"OpenAPIToolkit\":{\"template\":{\"json_agent\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"json_agent\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"AgentExecutor\",\"list\":false},\"requests_wrapper\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"requests_wrapper\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"TextRequestsWrapper\",\"list\":false},\"_type\":\"OpenAPIToolkit\"},\"description\":\"Toolkit for interacting with an OpenAPI API.\",\"base_classes\":[\"OpenAPIToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"OpenAPIToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreInfo\":{\"template\":{\"vectorstore\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstore\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStore\",\"list\":false},\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"VectorStoreInfo\"},\"description\":\"Information about a 
VectorStore.\",\"base_classes\":[\"VectorStoreInfo\"],\"display_name\":\"VectorStoreInfo\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreRouterToolkit\":{\"template\":{\"llm\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstores\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstores\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreInfo\",\"list\":true},\"_type\":\"VectorStoreRouterToolkit\"},\"description\":\"Toolkit for routing between Vector Stores.\",\"base_classes\":[\"VectorStoreRouterToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"VectorStoreRouterToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"VectorStoreToolkit\":{\"template\":{\"llm\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLanguageModel\",\"list\":false},\"vectorstore_info\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectorstore_info\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"VectorStoreInfo\",\"list\":false},\"_type\":\"VectorStoreToolkit\"},\"description\":\"Toolkit for interacting with a Vector Store.\",\"base_classes\":[\"VectorStoreToolkit\",\"BaseToolkit\",\"Tool\"],\"display_name\":\"VectorStoreToolkit\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"Metaphor\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from typing import List, Union\\nfrom langflow import 
CustomComponent\\n\\nfrom metaphor_python import Metaphor # type: ignore\\nfrom langchain.tools import Tool\\nfrom langchain.agents import tool\\nfrom langchain.agents.agent_toolkits.base import BaseToolkit\\n\\n\\nclass MetaphorToolkit(CustomComponent):\\n display_name: str = \\\"Metaphor\\\"\\n description: str = \\\"Metaphor Toolkit\\\"\\n documentation = (\\n \\\"https://python.langchain.com/docs/integrations/tools/metaphor_search\\\"\\n )\\n beta = True\\n # api key should be password = True\\n field_config = {\\n \\\"metaphor_api_key\\\": {\\\"display_name\\\": \\\"Metaphor API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"advanced\\\": True},\\n }\\n\\n def build(\\n self,\\n metaphor_api_key: str,\\n use_autoprompt: bool = True,\\n search_num_results: int = 5,\\n similar_num_results: int = 5,\\n ) -> Union[Tool, BaseToolkit]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n client = Metaphor(api_key=metaphor_api_key)\\n\\n @tool\\n def search(query: str):\\n \\\"\\\"\\\"Call search engine with a query.\\\"\\\"\\\"\\n return client.search(\\n query, use_autoprompt=use_autoprompt, num_results=search_num_results\\n )\\n\\n @tool\\n def get_contents(ids: List[str]):\\n \\\"\\\"\\\"Get contents of a webpage.\\n\\n The ids passed in should be a list of ids as fetched from `search`.\\n \\\"\\\"\\\"\\n return client.get_contents(ids)\\n\\n @tool\\n def find_similar(url: str):\\n \\\"\\\"\\\"Get search results similar to a given URL.\\n\\n The url passed in should be a URL returned from `search`\\n \\\"\\\"\\\"\\n return client.find_similar(url, num_results=similar_num_results)\\n\\n return [search, get_contents, find_similar] # type: 
ignore\\n\",\"password\":false,\"name\":\"code\",\"advanced\":true,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"metaphor_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"metaphor_api_key\",\"display_name\":\"Metaphor API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"search_num_results\",\"display_name\":\"search_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"similar_num_results\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"similar_num_results\",\"display_name\":\"similar_num_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"use_autoprompt\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":true,\"password\":false,\"name\":\"use_autoprompt\",\"display_name\":\"use_autoprompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false}},\"description\":\"Metaphor 
Toolkit\",\"base_classes\":[\"BaseTool\",\"Tool\"],\"display_name\":\"Metaphor\",\"custom_fields\":{\"metaphor_api_key\":null,\"search_num_results\":null,\"similar_num_results\":null,\"use_autoprompt\":null},\"output_types\":[\"Metaphor\"],\"documentation\":\"https://python.langchain.com/docs/integrations/tools/metaphor_search\",\"beta\":true,\"error\":null}},\"wrappers\":{\"TextRequestsWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ClientSession\",\"list\":false},\"auth\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"auth\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"{'Authorization': 'Bearer '}\",\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"TextRequestsWrapper\"},\"description\":\"Lightweight wrapper around requests library.\",\"base_classes\":[\"TextRequestsWrapper\"],\"display_name\":\"TextRequestsWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SQLDatabase\":{\"template\":{\"database_uri\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"database_uri\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"engine_args\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"engine_args\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"SQLDatabase\"},\"description\":\"Construct a SQLAlchemy engine from 
URI.\",\"base_classes\":[\"SQLDatabase\",\"function\"],\"display_name\":\"SQLDatabase\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null}},\"embeddings\":{\"OpenAIEmbeddings\":{\"template\":{\"allowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":[],\"password\":false,\"name\":\"allowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"disallowed_special\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"all\",\"password\":false,\"name\":\"disallowed_special\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Literal'all'\",\"list\":true},\"chunk_size\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"deployment\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"deployment\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"embedding_ctx_length\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":8191,\"password\":false,\"name\":\"embedding_ctx_length\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"{'Authorization': 'Bearer 
'}\",\"password\":false,\"name\":\"headers\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"text-embedding-ada-002\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"openai_api_base\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_base\",\"display_name\":\"OpenAI API Base\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"openai_api_key\",\"display_name\":\"OpenAI API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_type\",\"display_name\":\"OpenAI API Type\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_api_version\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"openai_api_version\",\"display_name\":\"OpenAI API 
Version\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_organization\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_organization\",\"display_name\":\"OpenAI Organization\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"openai_proxy\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"openai_proxy\",\"display_name\":\"OpenAI Proxy\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"request_timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"show_progress_bar\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"show_progress_bar\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"tiktoken_model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"tiktoken_model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"OpenAIEmbeddings\"},\"description\":\"OpenAI embedding 
models.\",\"base_classes\":[\"OpenAIEmbeddings\",\"Embeddings\"],\"display_name\":\"OpenAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai\",\"beta\":false,\"error\":null},\"CohereEmbeddings\":{\"template\":{\"async_client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"async_client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"cohere_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"cohere_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"model\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"embed-english-v2.0\",\"password\":false,\"name\":\"model\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"truncate\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"truncate\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CohereEmbeddings\"},\"description\":\"Cohere embedding 
models.\",\"base_classes\":[\"CohereEmbeddings\",\"Embeddings\"],\"display_name\":\"CohereEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere\",\"beta\":false,\"error\":null},\"HuggingFaceEmbeddings\":{\"template\":{\"cache_folder\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"cache_folder\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"encode_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"encode_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"model_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"sentence-transformers/all-mpnet-base-v2\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"multi_process\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"multi_process\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"HuggingFaceEmbeddings\"},\"description\":\"HuggingFace sentence_transformers embedding 
models.\",\"base_classes\":[\"HuggingFaceEmbeddings\",\"Embeddings\"],\"display_name\":\"HuggingFaceEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers\",\"beta\":false,\"error\":null},\"VertexAIEmbeddings\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"ForwardRef(\\\"'_LanguageModel'\\\")\",\"list\":false},\"credentials\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"credentials\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"location\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"us-central1\",\"password\":false,\"name\":\"location\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"max_output_tokens\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":128,\"password\":true,\"name\":\"max_output_tokens\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"max_retries\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6,\"password\":false,\"name\":\"max_retries\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"model_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"textembedding-gecko\",\"password\":false,\"name\":\"model_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"project\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"project\",\"advanced\"
:true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"request_parallelism\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"request_parallelism\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"stop\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"stop\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"temperature\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.0,\"password\":false,\"name\":\"temperature\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"top_k\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":40,\"password\":false,\"name\":\"top_k\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"top_p\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":0.95,\"password\":false,\"name\":\"top_p\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"_type\":\"VertexAIEmbeddings\"},\"description\":\"Google Cloud VertexAI embedding 
models.\",\"base_classes\":[\"VertexAIEmbeddings\",\"_VertexAICommon\",\"Embeddings\"],\"display_name\":\"VertexAIEmbeddings\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/google_vertex_ai_palm\",\"beta\":false,\"error\":null}},\"vectorstores\":{\"Chroma\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.Client\",\"list\":false},\"client_settings\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client_settings\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"chromadb.config.Setting\",\"list\":true},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"chroma_server_cors_allow_origins\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Chroma Server CORS Allow Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"chroma_server_grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Chroma Server GRPC 
Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Chroma Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_http_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_http_port\",\"display_name\":\"Chroma Server HTTP Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_ssl_enabled\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Chroma Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"collection_metadata\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"collection_metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langchain\",\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"persist\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"p
ersist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"persist_directory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"persist_directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"Chroma\"},\"description\":\"Create a Chroma vectorstore from a raw documents.\",\"base_classes\":[\"VectorStore\",\"Chroma\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Chroma\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma\",\"beta\":false,\"error\":null},\"FAISS\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"folder_path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"folder_path\",\"display_name\":\"Local 
Path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"FAISS\"},\"description\":\"Construct FAISS wrapper from raw 
documents.\",\"base_classes\":[\"FAISS\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"FAISS\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss\",\"beta\":false,\"error\":null},\"MongoDBAtlasVectorSearch\":{\"template\":{\"collection\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"collection\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Collection[MongoDBDocumentType]\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"db_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"db_name\",\"display_name\":\"Database Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"index_name\",\"display_name\":\"Index 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"mongodb_atlas_cluster_uri\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"mongodb_atlas_cluster_uri\",\"display_name\":\"MongoDB Atlas Cluster URI\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"MongoDBAtlasVectorSearch\"},\"description\":\"Construct MongoDBAtlasVectorSearch wrapper from raw documents.\",\"base_classes\":[\"MongoDBAtlasVectorSearch\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"MongoDB 
Atlas\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas\",\"beta\":false,\"error\":null},\"Pinecone\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":32,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"index_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"index_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"namespace\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"namespace\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"pinecone_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"pinecone_api_key\",\"advanced\":true,\"dynamic\
":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"pinecone_env\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"pinecone_env\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"text_key\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"text\",\"password\":true,\"name\":\"text_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"upsert_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"upsert_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"Pinecone\"},\"description\":\"Construct Pinecone wrapper from raw 
documents.\",\"base_classes\":[\"Pinecone\",\"VectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Pinecone\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/pinecone\",\"beta\":false,\"error\":null},\"Qdrant\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"hnsw_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"hnsw_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.HnswConfigDiff\",\"list\":false},\"init_from\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"init_from\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.InitFrom\",\"list\":false},\"optimizers_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"optimizers_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.OptimizersConfigDiff\",\"list\":false},\"quantization_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"quantization_config\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.QuantizationConfig\",\"list\":false},\"wal_config\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wal_config\",\"advanced\":
false,\"dynamic\":false,\"info\":\"\",\"type\":\"common_types.WalConfigDiff\",\"list\":false},\"api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"api_key\",\"display_name\":\"API Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"batch_size\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":64,\"password\":false,\"name\":\"batch_size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"collection_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"collection_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"content_payload_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"page_content\",\"password\":false,\"name\":\"content_payload_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"distance_func\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"Cosine\",\"password\":false,\"name\":\"distance_func\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"force_recreate\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"force_recreate\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6334,\"password\":false,\"name\":\"grpc_port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"https\":{\"required\":false
,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"https\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"location\":{\"required\":false,\"placeholder\":\":memory:\",\"show\":true,\"multiline\":false,\"value\":\":memory:\",\"password\":false,\"name\":\"location\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata_payload_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"metadata\",\"password\":false,\"name\":\"metadata_payload_key\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"on_disk\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"on_disk\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"on_disk_payload\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"on_disk_payload\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"path\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"path\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":6333,\"password\":false,\"name\":\"port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"prefer_grpc\":{\"required\":false
,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"prefer_grpc\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"prefix\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"prefix\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"replication_factor\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"replication_factor\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"shard_number\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"shard_number\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"timeout\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"timeout\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"float\",\"list\":false},\"url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"vector_name\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"vector_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"write_consistency_factor\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"write_consistency_factor\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"_type\":\"Qdran
t\"},\"description\":\"Construct Qdrant wrapper from a list of texts.\",\"base_classes\":[\"VectorStore\",\"Qdrant\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Qdrant\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant\",\"beta\":false,\"error\":null},\"SupabaseVectorStore\":{\"template\":{\"client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"supabase.client.Client\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"ids\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"ids\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"metadatas\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"query_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"query_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\"
,\"type\":\"code\",\"list\":false},\"supabase_service_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":true,\"name\":\"supabase_service_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"supabase_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"supabase_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"table_name\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"table_name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SupabaseVectorStore\"},\"description\":\"Return VectorStore initialized from texts and embeddings.\",\"base_classes\":[\"VectorStore\",\"SupabaseVectorStore\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Supabase\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase\",\"beta\":false,\"error\":null},\"Weaviate\":{\"template\":{\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"client_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"client_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"metadatas\":{\"required\":false,\"placehol
der\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"metadatas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":true},\"search_kwargs\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"search_kwargs\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"weaviate_url\":{\"required\":true,\"placeholder\":\"http://localhost:8080\",\"show\":true,\"multiline\":false,\"value\":\"http://localhost:8080\",\"password\":false,\"name\":\"weaviate_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"Weaviate\"},\"description\":\"Construct Weaviate wrapper from raw documents.\",\"base_classes\":[\"VectorStore\",\"Weaviate\",\"BaseRetriever\",\"VectorStoreRetriever\"],\"display_name\":\"Weaviate\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate\",\"beta\":false,\"error\":null},\"Vectara\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional, Union\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores import Vectara\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.schema import BaseRetriever\\nfrom langchain.embeddings.base import Embeddings\\n\\n\\nclass VectaraComponent(CustomComponent):\\n display_name: str = \\\"Vectara\\\"\\n description: str = \\\"Implementation of Vector Store using Vectara\\\"\\n documentation = (\\n \\\"https://python.langchain.com/docs/integrations/vectorstores/vectara\\\"\\n )\\n beta = True\\n # api key should be password = True\\n field_config = {\\n \\\"vectara_customer_id\\\": {\\\"display_name\\\": \\\"Vectara Customer ID\\\"},\\n \\\"vectara_corpus_id\\\": 
{\\\"display_name\\\": \\\"Vectara Corpus ID\\\"},\\n \\\"vectara_api_key\\\": {\\\"display_name\\\": \\\"Vectara API Key\\\", \\\"password\\\": True},\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\"},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n }\\n\\n def build(\\n self,\\n vectara_customer_id: str,\\n vectara_corpus_id: str,\\n vectara_api_key: str,\\n embedding: Optional[Embeddings] = None,\\n documents: Optional[Document] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n # If documents, then we need to create a Vectara instance using .from_documents\\n if documents is not None and embedding is not None:\\n return Vectara.from_documents(\\n documents=documents, # type: ignore\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n embedding=embedding,\\n )\\n\\n return Vectara(\\n vectara_customer_id=vectara_customer_id,\\n vectara_corpus_id=vectara_corpus_id,\\n vectara_api_key=vectara_api_key,\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"embedding\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"vectara_api_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"vectara_api_key\",\"display_name\":\"Vectara API 
Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"vectara_corpus_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectara_corpus_id\",\"display_name\":\"Vectara Corpus ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"vectara_customer_id\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"vectara_customer_id\",\"display_name\":\"Vectara Customer ID\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Implementation of Vector Store using Vectara\",\"base_classes\":[\"VectorStore\",\"BaseRetriever\"],\"display_name\":\"Vectara\",\"custom_fields\":{\"documents\":null,\"embedding\":null,\"vectara_api_key\":null,\"vectara_corpus_id\":null,\"vectara_customer_id\":null},\"output_types\":[\"Vectara\"],\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/vectara\",\"beta\":true,\"error\":null},\"Chroma (1)\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import Optional, Union\\nfrom langflow import CustomComponent\\n\\nfrom langchain.vectorstores import Chroma\\nfrom langchain.schema import Document\\nfrom langchain.vectorstores.base import VectorStore\\nfrom langchain.schema import BaseRetriever\\nfrom langchain.embeddings.base import Embeddings\\nimport chromadb # type: ignore\\n\\n\\nclass ChromaComponent(CustomComponent):\\n \\\"\\\"\\\"\\n A custom component for implementing a Vector Store using Chroma.\\n \\\"\\\"\\\"\\n\\n display_name: str = \\\"Chroma (Custom Component)\\\"\\n description: str = \\\"Implementation of Vector Store using Chroma\\\"\\n documentation = \\\"https://python.langchain.com/docs/integrations/vectorstores/chroma\\\"\\n beta = True\\n\\n def build_config(self):\\n \\\"\\\"\\\"\\n 
Builds the configuration for the component.\\n\\n Returns:\\n - dict: A dictionary containing the configuration options for the component.\\n \\\"\\\"\\\"\\n return {\\n \\\"collection_name\\\": {\\\"display_name\\\": \\\"Collection Name\\\", \\\"value\\\": \\\"langflow\\\"},\\n \\\"persist\\\": {\\\"display_name\\\": \\\"Persist\\\"},\\n \\\"persist_directory\\\": {\\\"display_name\\\": \\\"Persist Directory\\\"},\\n \\\"code\\\": {\\\"show\\\": False, \\\"display_name\\\": \\\"Code\\\"},\\n \\\"documents\\\": {\\\"display_name\\\": \\\"Documents\\\", \\\"is_list\\\": True},\\n \\\"embedding\\\": {\\\"display_name\\\": \\\"Embedding\\\"},\\n \\\"chroma_server_cors_allow_origins\\\": {\\n \\\"display_name\\\": \\\"Server CORS Allow Origins\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_host\\\": {\\\"display_name\\\": \\\"Server Host\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_port\\\": {\\\"display_name\\\": \\\"Server Port\\\", \\\"advanced\\\": True},\\n \\\"chroma_server_grpc_port\\\": {\\n \\\"display_name\\\": \\\"Server gRPC Port\\\",\\n \\\"advanced\\\": True,\\n },\\n \\\"chroma_server_ssl_enabled\\\": {\\n \\\"display_name\\\": \\\"Server SSL Enabled\\\",\\n \\\"advanced\\\": True,\\n },\\n }\\n\\n def build(\\n self,\\n collection_name: str,\\n persist: bool,\\n chroma_server_ssl_enabled: bool,\\n persist_directory: Optional[str] = None,\\n embedding: Optional[Embeddings] = None,\\n documents: Optional[Document] = None,\\n chroma_server_cors_allow_origins: Optional[str] = None,\\n chroma_server_host: Optional[str] = None,\\n chroma_server_port: Optional[int] = None,\\n chroma_server_grpc_port: Optional[int] = None,\\n ) -> Union[VectorStore, BaseRetriever]:\\n \\\"\\\"\\\"\\n Builds the Vector Store or BaseRetriever object.\\n\\n Args:\\n - collection_name (str): The name of the collection.\\n - persist_directory (Optional[str]): The directory to persist the Vector Store to.\\n - chroma_server_ssl_enabled (bool): Whether to enable SSL 
for the Chroma server.\\n - persist (bool): Whether to persist the Vector Store or not.\\n - embedding (Optional[Embeddings]): The embeddings to use for the Vector Store.\\n - documents (Optional[Document]): The documents to use for the Vector Store.\\n - chroma_server_cors_allow_origins (Optional[str]): The CORS allow origins for the Chroma server.\\n - chroma_server_host (Optional[str]): The host for the Chroma server.\\n - chroma_server_port (Optional[int]): The port for the Chroma server.\\n - chroma_server_grpc_port (Optional[int]): The gRPC port for the Chroma server.\\n\\n Returns:\\n - Union[VectorStore, BaseRetriever]: The Vector Store or BaseRetriever object.\\n \\\"\\\"\\\"\\n\\n # Chroma settings\\n chroma_settings = None\\n\\n if chroma_server_host is not None:\\n chroma_settings = chromadb.config.Settings(\\n chroma_server_cors_allow_origins=chroma_server_cors_allow_origins\\n or None,\\n chroma_server_host=chroma_server_host,\\n chroma_server_port=chroma_server_port or None,\\n chroma_server_grpc_port=chroma_server_grpc_port or None,\\n chroma_server_ssl_enabled=chroma_server_ssl_enabled,\\n )\\n\\n # If documents, then we need to create a Chroma instance using .from_documents\\n if documents is not None and embedding is not None:\\n return Chroma.from_documents(\\n documents=documents, # type: ignore\\n persist_directory=persist_directory if persist else None,\\n collection_name=collection_name,\\n embedding=embedding,\\n client_settings=chroma_settings,\\n )\\n\\n return Chroma(\\n persist_directory=persist_directory, client_settings=chroma_settings\\n )\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"chroma_server_cors_allow_origins\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_cors_allow_origins\",\"display_name\":\"Server CORS Allow 
Origins\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_grpc_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_grpc_port\",\"display_name\":\"Server gRPC Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chroma_server_host\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_host\",\"display_name\":\"Server Host\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"chroma_server_port\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"chroma_server_port\",\"display_name\":\"Server Port\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chroma_server_ssl_enabled\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"chroma_server_ssl_enabled\",\"display_name\":\"Server SSL Enabled\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"collection_name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"langflow\",\"password\":false,\"name\":\"collection_name\",\"display_name\":\"Collection 
Name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"documents\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"display_name\":\"Documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"embedding\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"embedding\",\"display_name\":\"Embedding\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Embeddings\",\"list\":false},\"persist\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"persist\",\"display_name\":\"Persist\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"persist_directory\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"persist_directory\",\"display_name\":\"Persist Directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Implementation of Vector Store using Chroma\",\"base_classes\":[\"VectorStore\",\"BaseRetriever\"],\"display_name\":\"Chroma (Custom 
Component)\",\"custom_fields\":{\"chroma_server_cors_allow_origins\":null,\"chroma_server_grpc_port\":null,\"chroma_server_host\":null,\"chroma_server_port\":null,\"chroma_server_ssl_enabled\":null,\"collection_name\":null,\"documents\":null,\"embedding\":null,\"persist\":null,\"persist_directory\":null},\"output_types\":[\"Chroma\"],\"documentation\":\"https://python.langchain.com/docs/integrations/vectorstores/chroma\",\"beta\":true,\"error\":null}},\"documentloaders\":{\"AZLyricsLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"AZLyricsLoader\"},\"description\":\"Load `AZLyrics` 
webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"AZLyricsLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics\",\"beta\":false,\"error\":null},\"AirbyteJSONLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"AirbyteJSONLoader\"},\"description\":\"Load local `Airbyte` json files.\",\"base_classes\":[\"Document\"],\"display_name\":\"AirbyteJSONLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json\",\"beta\":false,\"error\":null},\"BSHTMLLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".html\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"html\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"BSHTMLLoader\"},\"description\":\"Load `HTML` files and parse them with `beautiful 
soup`.\",\"base_classes\":[\"Document\"],\"display_name\":\"BSHTMLLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"beta\":false,\"error\":null},\"CSVLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".csv\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"csv\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"CSVLoader\"},\"description\":\"Load a `CSV` file into a list of Documents.\",\"base_classes\":[\"Document\"],\"display_name\":\"CSVLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv\",\"beta\":false,\"error\":null},\"CoNLLULoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".csv\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"csv\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"CoNLLULoader\"},\"description\":\"Load `CoNLL-U` 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"CoNLLULoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u\",\"beta\":false,\"error\":null},\"CollegeConfidentialLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CollegeConfidentialLoader\"},\"description\":\"Load `College Confidential` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"CollegeConfidentialLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential\",\"beta\":false,\"error\":null},\"DirectoryLoader\":{\"template\":{\"glob\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"**/*.txt\",\"password\":false,\"name\":\"glob\",\"display_name\":\"glob\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"load_hidden\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"False\",\"password\":false,\"name\":\"load_hidden\",\"display_name\":\"Load hidden files\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"max_concurrency\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"max_concurrency\",\"display_name\":\"Max 
concurrency\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"recursive\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"True\",\"password\":false,\"name\":\"recursive\",\"display_name\":\"Recursive\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"silent_errors\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"False\",\"password\":false,\"name\":\"silent_errors\",\"display_name\":\"Silent errors\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"use_multithreading\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"True\",\"password\":false,\"name\":\"use_multithreading\",\"display_name\":\"Use multithreading\",\"advanced\":true,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"DirectoryLoader\"},\"description\":\"Load from a 
directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"DirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/file_directory\",\"beta\":false,\"error\":null},\"EverNoteLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".xml\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"xml\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"EverNoteLoader\"},\"description\":\"Load from `EverNote`.\",\"base_classes\":[\"Document\"],\"display_name\":\"EverNoteLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote\",\"beta\":false,\"error\":null},\"FacebookChatLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".json\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"json\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"FacebookChatLoader\"},\"description\":\"Load `Facebook Chat` messages directory 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"FacebookChatLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat\",\"beta\":false,\"error\":null},\"GitLoader\":{\"template\":{\"branch\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"branch\",\"display_name\":\"Branch\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"clone_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"clone_url\",\"display_name\":\"Clone URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"file_filter\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"file_filter\",\"display_name\":\"File extensions (comma-separated)\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"repo_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"repo_path\",\"display_name\":\"Path to repository\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GitLoader\"},\"description\":\"Load `Git` repository 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/git\",\"beta\":false,\"error\":null},\"GitbookLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_page\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_page\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GitbookLoader\"},\"description\":\"Load `GitBook` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"GitbookLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gitbook\",\"beta\":false,\"error\":null},\"GutenbergLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"GutenbergLoader\"},\"description\":\"Load from 
`Gutenberg.org`.\",\"base_classes\":[\"Document\"],\"display_name\":\"GutenbergLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gutenberg\",\"beta\":false,\"error\":null},\"HNLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"HNLoader\"},\"description\":\"Load `Hacker News` data.\",\"base_classes\":[\"Document\"],\"display_name\":\"HNLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/hacker_news\",\"beta\":false,\"error\":null},\"IFixitLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"IFixitLoader\"},\"description\":\"Load `iFixit` repair guides, device wikis and 
answers.\",\"base_classes\":[\"Document\"],\"display_name\":\"IFixitLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit\",\"beta\":false,\"error\":null},\"IMSDbLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"IMSDbLoader\"},\"description\":\"Load `IMSDb` webpages.\",\"base_classes\":[\"Document\"],\"display_name\":\"IMSDbLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/imsdb\",\"beta\":false,\"error\":null},\"NotionDirectoryLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"NotionDirectoryLoader\"},\"description\":\"Load `Notion directory` 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"NotionDirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/notion\",\"beta\":false,\"error\":null},\"PyPDFLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".pdf\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"pdf\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"PyPDFLoader\"},\"description\":\"Load `PDF` using `pypdf` and chunks at character level.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"beta\":false,\"error\":null},\"PyPDFDirectoryLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"PyPDFDirectoryLoader\"},\"description\":\"Load a directory with `PDF` files using `pypdf` and chunks at character 
level.\",\"base_classes\":[\"Document\"],\"display_name\":\"PyPDFDirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf\",\"beta\":false,\"error\":null},\"ReadTheDocsLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"path\",\"display_name\":\"Local directory\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ReadTheDocsLoader\"},\"description\":\"Load `ReadTheDocs` documentation directory.\",\"base_classes\":[\"Document\"],\"display_name\":\"ReadTheDocsLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/readthedocs_documentation\",\"beta\":false,\"error\":null},\"SRTLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".srt\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"srt\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"SRTLoader\"},\"description\":\"Load `.srt` (subtitle) 
files.\",\"base_classes\":[\"Document\"],\"display_name\":\"SRTLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle\",\"beta\":false,\"error\":null},\"SlackDirectoryLoader\":{\"template\":{\"zip_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".zip\"],\"password\":false,\"name\":\"zip_path\",\"display_name\":\"Path to zip file\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"zip\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"workspace_url\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"workspace_url\",\"display_name\":\"Workspace URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SlackDirectoryLoader\"},\"description\":\"Load from a `Slack` directory 
dump.\",\"base_classes\":[\"Document\"],\"display_name\":\"SlackDirectoryLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack\",\"beta\":false,\"error\":null},\"TextLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".txt\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"txt\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"TextLoader\"},\"description\":\"Load text file.\",\"base_classes\":[\"Document\"],\"display_name\":\"TextLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/\",\"beta\":false,\"error\":null},\"UnstructuredEmailLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".eml\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"eml\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"UnstructuredEmailLoader\"},\"description\":\"Load email files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredEmailLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/email\",\"beta\":false,\"error\":null},\"UnstructuredHTMLLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".html\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"html\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"UnstructuredHTMLLoader\"},\"description\":\"Load `HTML` files using `Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredHTMLLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html\",\"beta\":false,\"error\":null},\"UnstructuredMarkdownLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".md\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"md\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"UnstructuredMarkdownLoader\"},\"description\":\"Load `Markdown` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredMarkdownLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/markdown\",\"beta\":false,\"error\":null},\"UnstructuredPowerPointLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".pptx\",\".ppt\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"pptx\",\"ppt\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"UnstructuredPowerPointLoader\"},\"description\":\"Load `Microsoft PowerPoint` files using 
`Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredPowerPointLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_powerpoint\",\"beta\":false,\"error\":null},\"UnstructuredWordDocumentLoader\":{\"template\":{\"file_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"suffixes\":[\".docx\",\".doc\"],\"password\":false,\"name\":\"file_path\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"file\",\"list\":false,\"fileTypes\":[\"docx\",\"doc\"],\"file_path\":null},\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"UnstructuredWordDocumentLoader\"},\"description\":\"Load `Microsoft Word` file using `Unstructured`.\",\"base_classes\":[\"Document\"],\"display_name\":\"UnstructuredWordDocumentLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word\",\"beta\":false,\"error\":null},\"WebBaseLoader\":{\"template\":{\"metadata\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{}\",\"password\":false,\"name\":\"metadata\",\"display_name\":\"Metadata\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"web_path\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"web_path\",\"display_name\":\"Web Page\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"WebBaseLoader\"},\"description\":\"Load HTML pages using `urllib` and parse them with 
`BeautifulSoup`.\",\"base_classes\":[\"Document\"],\"display_name\":\"WebBaseLoader\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base\",\"beta\":false,\"error\":null}},\"textsplitters\":{\"CharacterTextSplitter\":{\"template\":{\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"chunk_overlap\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":200,\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"separator\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\\\\n\",\"password\":false,\"name\":\"separator\",\"display_name\":\"Separator\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"CharacterTextSplitter\"},\"description\":\"Splitting text that looks at 
characters.\",\"base_classes\":[\"Document\"],\"display_name\":\"CharacterTextSplitter\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter\",\"beta\":false,\"error\":null},\"RecursiveCharacterTextSplitter\":{\"template\":{\"documents\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"documents\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":true},\"chunk_overlap\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":200,\"password\":false,\"name\":\"chunk_overlap\",\"display_name\":\"Chunk Overlap\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"chunk_size\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":1000,\"password\":false,\"name\":\"chunk_size\",\"display_name\":\"Chunk Size\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"separator_type\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"Text\",\"password\":false,\"options\":[\"Text\",\"cpp\",\"go\",\"html\",\"java\",\"js\",\"latex\",\"markdown\",\"php\",\"proto\",\"python\",\"rst\",\"ruby\",\"rust\",\"scala\",\"sol\",\"swift\"],\"name\":\"separator_type\",\"display_name\":\"Separator Type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"separators\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"\\\\n\",\"password\":false,\"name\":\"separators\",\"display_name\":\"Separator\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"RecursiveCharacterTextSplitter\"},\"description\":\"Splitting text by recursively looking at 
characters.\",\"base_classes\":[\"Document\"],\"display_name\":\"RecursiveCharacterTextSplitter\",\"custom_fields\":{},\"output_types\":[\"Document\"],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter\",\"beta\":false,\"error\":null}},\"utilities\":{\"BingSearchAPIWrapper\":{\"template\":{\"bing_search_url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"bing_search_url\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"bing_subscription_key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"bing_subscription_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"_type\":\"BingSearchAPIWrapper\"},\"description\":\"Wrapper for Bing Search 
API.\",\"base_classes\":[\"BingSearchAPIWrapper\"],\"display_name\":\"BingSearchAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSearchAPIWrapper\":{\"template\":{\"google_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"google_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"google_cse_id\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"google_cse_id\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"search_engine\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"search_engine\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"siterestrict\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"siterestrict\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"GoogleSearchAPIWrapper\"},\"description\":\"Wrapper for Google Search 
API.\",\"base_classes\":[\"GoogleSearchAPIWrapper\"],\"display_name\":\"GoogleSearchAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GoogleSerperAPIWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ClientSession\",\"list\":false},\"gl\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"us\",\"password\":false,\"name\":\"gl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"hl\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"en\",\"password\":false,\"name\":\"hl\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"result_key_for_type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"{\\n \\\"news\\\": \\\"news\\\",\\n \\\"places\\\": \\\"places\\\",\\n \\\"images\\\": \\\"images\\\",\\n \\\"search\\\": 
\\\"organic\\\"\\n}\",\"password\":true,\"name\":\"result_key_for_type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"serper_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"serper_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"tbs\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"tbs\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"type\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"search\",\"password\":false,\"options\":[\"news\",\"search\",\"places\",\"images\"],\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"_type\":\"GoogleSerperAPIWrapper\"},\"description\":\"Wrapper around the Serper.dev Google Search API.\",\"base_classes\":[\"GoogleSerperAPIWrapper\"],\"display_name\":\"GoogleSerperAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SearxSearchWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"categories\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"categories\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"engines\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"engines\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":true},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"{'Authorization': 'Bearer 
'}\",\"password\":false,\"name\":\"headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"k\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":10,\"password\":false,\"name\":\"k\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"params\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"params\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"query_suffix\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"query_suffix\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"searx_host\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"\",\"password\":false,\"name\":\"searx_host\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"unsecure\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"unsecure\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"_type\":\"SearxSearchWrapper\"},\"description\":\"Wrapper for Searx API.\",\"base_classes\":[\"SearxSearchWrapper\"],\"display_name\":\"SearxSearchWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"SerpAPIWrapper\":{\"template\":{\"aiosession\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"aiosession\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ClientSession\",\"list\":false},\"params\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"{\\n \\\"engine\\\": \\\"google\\\",\\n \\\"google_domain\\\": \\\"google.com\\\",\\n \\\"gl\\\": \\\"us\\\",\\n \\\"hl\\\": 
\\\"en\\\"\\n}\",\"password\":false,\"name\":\"params\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"code\",\"list\":false},\"search_engine\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"search_engine\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"serpapi_api_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":true,\"name\":\"serpapi_api_key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"SerpAPIWrapper\"},\"description\":\"Wrapper around SerpAPI.\",\"base_classes\":[\"SerpAPIWrapper\"],\"display_name\":\"SerpAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WikipediaAPIWrapper\":{\"template\":{\"doc_content_chars_max\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":4000,\"password\":false,\"name\":\"doc_content_chars_max\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"lang\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":\"en\",\"password\":false,\"name\":\"lang\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"load_all_available_meta\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":false,\"password\":false,\"name\":\"load_all_available_meta\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"bool\",\"list\":false},\"top_k_results\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"value\":3,\"password\":false,\"name\":\"top_k_results\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"int\",\"list\":false},\"wiki_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wiki_client\",\"advanced\":fal
se,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"_type\":\"WikipediaAPIWrapper\"},\"description\":\"Wrapper around WikipediaAPI.\",\"base_classes\":[\"WikipediaAPIWrapper\"],\"display_name\":\"WikipediaAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"WolframAlphaAPIWrapper\":{\"template\":{\"wolfram_alpha_appid\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wolfram_alpha_appid\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"wolfram_client\":{\"required\":false,\"placeholder\":\"\",\"show\":false,\"multiline\":false,\"password\":false,\"name\":\"wolfram_client\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Any\",\"list\":false},\"_type\":\"WolframAlphaAPIWrapper\"},\"description\":\"Wrapper for Wolfram Alpha.\",\"base_classes\":[\"WolframAlphaAPIWrapper\"],\"display_name\":\"WolframAlphaAPIWrapper\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":false,\"error\":null},\"GetRequest\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\nimport requests\\nfrom typing import Optional\\n\\n\\nclass GetRequest(CustomComponent):\\n display_name: str = \\\"GET Request\\\"\\n description: str = \\\"Make a GET request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#get-request\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\n \\\"display_name\\\": \\\"URL\\\",\\n \\\"info\\\": \\\"The URL to make the request to\\\",\\n \\\"is_list\\\": True,\\n },\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"field_type\\\": 
\\\"code\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"timeout\\\": {\\n \\\"display_name\\\": \\\"Timeout\\\",\\n \\\"field_type\\\": \\\"int\\\",\\n \\\"info\\\": \\\"The timeout to use for the request.\\\",\\n \\\"value\\\": 5,\\n },\\n }\\n\\n def get_document(\\n self, session: requests.Session, url: str, headers: Optional[dict], timeout: int\\n ) -> Document:\\n try:\\n response = session.get(url, headers=headers, timeout=int(timeout))\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except requests.Timeout:\\n return Document(\\n page_content=\\\"Request Timed Out\\\",\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 408},\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n url: str,\\n headers: Optional[dict] = None,\\n timeout: int = 5,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n urls = url if isinstance(url, list) else [url]\\n with requests.Session() as session:\\n documents = [self.get_document(session, u, headers, timeout) for u in urls]\\n self.repr_value = documents\\n return documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the 
request.\",\"type\":\"code\",\"list\":false},\"timeout\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":5,\"password\":false,\"name\":\"timeout\",\"display_name\":\"Timeout\",\"advanced\":false,\"dynamic\":false,\"info\":\"The timeout to use for the request.\",\"type\":\"int\",\"list\":false},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to\",\"type\":\"str\",\"list\":true}},\"description\":\"Make a GET request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"GET Request\",\"custom_fields\":{\"headers\":null,\"timeout\":null,\"url\":null},\"output_types\":[\"GetRequest\"],\"documentation\":\"https://docs.langflow.org/components/utilities#get-request\",\"beta\":true,\"error\":null},\"PostRequest\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\nimport requests\\nfrom typing import Optional\\n\\n\\nclass PostRequest(CustomComponent):\\n display_name: str = \\\"POST Request\\\"\\n description: str = \\\"Make a POST request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#post-request\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def 
post_document(\\n self,\\n session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> Document:\\n try:\\n response = session.post(url, headers=headers, data=document.page_content)\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": 500,\\n },\\n )\\n\\n def build(\\n self,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> list[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(\\n isinstance(doc, Document) for doc in document\\n ):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [\\n self.post_document(session, doc, url, headers) for doc in documents\\n ]\\n self.repr_value = documents\\n return documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"document\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The 
headers to send with the request.\",\"type\":\"code\",\"list\":false},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\",\"type\":\"str\",\"list\":false}},\"description\":\"Make a POST request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"POST Request\",\"custom_fields\":{\"document\":null,\"headers\":null,\"url\":null},\"output_types\":[\"PostRequest\"],\"documentation\":\"https://docs.langflow.org/components/utilities#post-request\",\"beta\":true,\"error\":null},\"UpdateRequest\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":false,\"multiline\":true,\"value\":\"from typing import List, Optional\\nimport requests\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass UpdateRequest(CustomComponent):\\n display_name: str = \\\"Update Request\\\"\\n description: str = \\\"Make a PATCH request to the given URL.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n documentation: str = \\\"https://docs.langflow.org/components/utilities#update-request\\\"\\n beta = True\\n field_config = {\\n \\\"url\\\": {\\\"display_name\\\": \\\"URL\\\", \\\"info\\\": \\\"The URL to make the request to.\\\"},\\n \\\"headers\\\": {\\n \\\"display_name\\\": \\\"Headers\\\",\\n \\\"field_type\\\": \\\"code\\\",\\n \\\"info\\\": \\\"The headers to send with the request.\\\",\\n },\\n \\\"code\\\": {\\\"show\\\": False},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n \\\"method\\\": {\\n \\\"display_name\\\": \\\"Method\\\",\\n \\\"field_type\\\": \\\"str\\\",\\n \\\"info\\\": \\\"The HTTP method to use.\\\",\\n \\\"options\\\": [\\\"PATCH\\\", \\\"PUT\\\"],\\n \\\"value\\\": \\\"PATCH\\\",\\n },\\n }\\n\\n def 
update_document(\\n self,\\n session: requests.Session,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n method: str = \\\"PATCH\\\",\\n ) -> Document:\\n try:\\n if method == \\\"PATCH\\\":\\n response = session.patch(\\n url, headers=headers, data=document.page_content\\n )\\n elif method == \\\"PUT\\\":\\n response = session.put(url, headers=headers, data=document.page_content)\\n else:\\n raise ValueError(f\\\"Unsupported method: {method}\\\")\\n try:\\n response_json = response.json()\\n result = orjson_dumps(response_json, indent_2=False)\\n except Exception:\\n result = response.text\\n self.repr_value = result\\n return Document(\\n page_content=result,\\n metadata={\\n \\\"source\\\": url,\\n \\\"headers\\\": headers,\\n \\\"status_code\\\": response.status_code,\\n },\\n )\\n except Exception as exc:\\n return Document(\\n page_content=str(exc),\\n metadata={\\\"source\\\": url, \\\"headers\\\": headers, \\\"status_code\\\": 500},\\n )\\n\\n def build(\\n self,\\n method: str,\\n document: Document,\\n url: str,\\n headers: Optional[dict] = None,\\n ) -> List[Document]:\\n if headers is None:\\n headers = {}\\n\\n if not isinstance(document, list) and isinstance(document, Document):\\n documents: list[Document] = [document]\\n elif isinstance(document, list) and all(\\n isinstance(doc, Document) for doc in document\\n ):\\n documents = document\\n else:\\n raise ValueError(\\\"document must be a Document or a list of Documents\\\")\\n\\n with requests.Session() as session:\\n documents = [\\n self.update_document(session, doc, url, headers, method)\\n for doc in documents\\n ]\\n self.repr_value = documents\\n return 
documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"document\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"headers\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"headers\",\"display_name\":\"Headers\",\"advanced\":false,\"dynamic\":false,\"info\":\"The headers to send with the request.\",\"type\":\"code\",\"list\":false},\"method\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"PATCH\",\"password\":false,\"options\":[\"PATCH\",\"PUT\"],\"name\":\"method\",\"display_name\":\"Method\",\"advanced\":false,\"dynamic\":false,\"info\":\"The HTTP method to use.\",\"type\":\"str\",\"list\":true},\"url\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"url\",\"display_name\":\"URL\",\"advanced\":false,\"dynamic\":false,\"info\":\"The URL to make the request to.\",\"type\":\"str\",\"list\":false}},\"description\":\"Make a PATCH request to the given URL.\",\"base_classes\":[\"Document\"],\"display_name\":\"Update Request\",\"custom_fields\":{\"document\":null,\"headers\":null,\"method\":null,\"url\":null},\"output_types\":[\"UpdateRequest\"],\"documentation\":\"https://docs.langflow.org/components/utilities#update-request\",\"beta\":true,\"error\":null},\"JSONDocumentBuilder\":{\"template\":{\"code\":{\"dynamic\":true,\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"### JSON Document Builder\\n\\n# Build a Document containing a JSON object using a key and another Document page content.\\n\\n# **Params**\\n\\n# - **Key:** The key to use for the JSON object.\\n# - **Document:** The Document page to use for the JSON 
object.\\n\\n# **Output**\\n\\n# - **Document:** The Document containing the JSON object.\\n\\nfrom langflow import CustomComponent\\nfrom langchain.schema import Document\\nfrom langflow.services.database.models.base import orjson_dumps\\n\\n\\nclass JSONDocumentBuilder(CustomComponent):\\n display_name: str = \\\"JSON Document Builder\\\"\\n description: str = \\\"Build a Document containing a JSON object using a key and another Document page content.\\\"\\n output_types: list[str] = [\\\"Document\\\"]\\n beta = True\\n documentation: str = (\\n \\\"https://docs.langflow.org/components/utilities#json-document-builder\\\"\\n )\\n\\n field_config = {\\n \\\"key\\\": {\\\"display_name\\\": \\\"Key\\\"},\\n \\\"document\\\": {\\\"display_name\\\": \\\"Document\\\"},\\n }\\n\\n def build(\\n self,\\n key: str,\\n document: Document,\\n ) -> Document:\\n documents = None\\n if isinstance(document, list):\\n documents = [\\n Document(\\n page_content=orjson_dumps({key: doc.page_content}, indent_2=False)\\n )\\n for doc in document\\n ]\\n elif isinstance(document, Document):\\n documents = Document(\\n page_content=orjson_dumps({key: document.page_content}, indent_2=False)\\n )\\n else:\\n raise TypeError(\\n f\\\"Expected Document or list of Documents, got {type(document)}\\\"\\n )\\n self.repr_value = documents\\n return documents\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\",\"document\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"document\",\"display_name\":\"Document\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"Document\",\"list\":false},\"key\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"key\",\"display_name\":\"Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false}},\"description\":\"Build a Document containing a 
JSON object using a key and another Document page content.\",\"base_classes\":[\"Document\"],\"display_name\":\"JSON Document Builder\",\"custom_fields\":{\"document\":null,\"key\":null},\"output_types\":[\"JSONDocumentBuilder\"],\"documentation\":\"https://docs.langflow.org/components/utilities#json-document-builder\",\"beta\":true,\"error\":null}},\"output_parsers\":{\"ResponseSchema\":{\"template\":{\"description\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"password\":false,\"name\":\"description\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"name\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"name\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"type\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"string\",\"password\":false,\"name\":\"type\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"ResponseSchema\"},\"description\":\"A schema for a response from a structured output 
parser.\",\"base_classes\":[\"ResponseSchema\"],\"display_name\":\"ResponseSchema\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"beta\":false,\"error\":null},\"StructuredOutputParser\":{\"template\":{\"response_schemas\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"response_schemas\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"ResponseSchema\",\"list\":true},\"_type\":\"StructuredOutputParser\"},\"description\":\"\",\"base_classes\":[\"StructuredOutputParser\",\"BaseOutputParser\",\"BaseLLMOutputParser\"],\"display_name\":\"StructuredOutputParser\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/model_io/output_parsers/structured\",\"beta\":false,\"error\":null}},\"retrievers\":{\"MultiQueryRetriever\":{\"template\":{\"llm\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"llm\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseLLM\",\"list\":false},\"prompt\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":{\"input_variables\":[\"question\"],\"output_parser\":null,\"partial_variables\":{},\"template\":\"You are an AI language model assistant. Your task is \\n to generate 3 different versions of the given user \\n question to retrieve relevant documents from a vector database. \\n By generating multiple perspectives on the user question, \\n your goal is to help the user overcome some of the limitations \\n of distance-based similarity search. Provide these alternative \\n questions separated by newlines. 
Original question: {question}\",\"template_format\":\"f-string\",\"validate_template\":true,\"_type\":\"prompt\"},\"password\":false,\"name\":\"prompt\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"PromptTemplate\",\"list\":false},\"retriever\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"password\":false,\"name\":\"retriever\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"BaseRetriever\",\"list\":false},\"parser_key\":{\"required\":false,\"placeholder\":\"\",\"show\":true,\"multiline\":false,\"value\":\"lines\",\"password\":false,\"name\":\"parser_key\",\"display_name\":\"Parser Key\",\"advanced\":false,\"dynamic\":false,\"info\":\"\",\"type\":\"str\",\"list\":false},\"_type\":\"MultiQueryRetriever\"},\"description\":\"Initialize from llm using default template.\",\"base_classes\":[\"BaseRetriever\",\"MultiQueryRetriever\"],\"display_name\":\"MultiQueryRetriever\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"https://python.langchain.com/docs/modules/data_connection/retrievers/how_to/MultiQueryRetriever\",\"beta\":false,\"error\":null}},\"custom_components\":{\"CustomComponent\":{\"template\":{\"code\":{\"required\":true,\"placeholder\":\"\",\"show\":true,\"multiline\":true,\"value\":\"from langflow import CustomComponent\\n\\nfrom langchain.llms.base import BaseLLM\\nfrom langchain.chains import LLMChain\\nfrom langchain import PromptTemplate\\nfrom langchain.schema import Document\\n\\nimport requests\\n\\nclass YourComponent(CustomComponent):\\n display_name: str = \\\"Custom Component\\\"\\n description: str = \\\"Create any custom component you want!\\\"\\n\\n def build_config(self):\\n return { \\\"url\\\": { \\\"multiline\\\": True, \\\"required\\\": True } }\\n\\n def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document:\\n response = requests.get(url)\\n chain = LLMChain(llm=llm, prompt=prompt)\\n result = chain.run(response.text[:300])\\n return 
Document(page_content=str(result))\\n\",\"password\":false,\"name\":\"code\",\"advanced\":false,\"dynamic\":true,\"info\":\"\",\"type\":\"code\",\"list\":false},\"_type\":\"CustomComponent\"},\"description\":\"Create any custom component you want!\",\"base_classes\":[],\"display_name\":\"Custom Component\",\"custom_fields\":{},\"output_types\":[],\"documentation\":\"\",\"beta\":true,\"error\":null}}}" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 1.031 } + }, + { + "startedDateTime": "2023-08-31T14:55:36.131Z", + "time": 0.743, + "request": { + "method": "GET", + "url": "http://localhost:3000/api/v1/flows/", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Accept", "value": "application/json, text/plain, */*" }, + { "name": "Accept-Encoding", "value": "gzip, deflate, br" }, + { "name": "Accept-Language", "value": "en-US,en;q=0.9" }, + { "name": "Authorization", "value": "Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk" }, + { "name": "Connection", "value": "keep-alive" }, + { "name": "Cookie", "value": "access_token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJiNWIxYjFhZC05M2VjLTRjMjgtYWMxNy01OGMxNDQ0MWI1ZGQiLCJleHAiOjE3MjUwMjk3MzV9.8qhEtryWqA9RbwEP2s20umKdVE7J7jHGoogXZqxLNWk; refresh_token=auto" }, + { "name": "Host", "value": "localhost:3000" }, + { "name": "Referer", "value": "http://localhost:3000/" }, + { "name": "Sec-Fetch-Dest", "value": "empty" }, + { "name": "Sec-Fetch-Mode", "value": "cors" }, + { "name": "Sec-Fetch-Site", "value": "same-origin" }, + { "name": "User-Agent", "value": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36" }, + { "name": "sec-ch-ua", "value": "\"Not)A;Brand\";v=\"24\", \"Chromium\";v=\"116\"" }, + { "name": "sec-ch-ua-mobile", "value": "?0" 
}, + { "name": "sec-ch-ua-platform", "value": "\"Linux\"" } + ], + "queryString": [], + "headersSize": -1, + "bodySize": -1 + }, + "response": { + "status": 200, + "statusText": "OK", + "httpVersion": "HTTP/1.1", + "cookies": [], + "headers": [ + { "name": "Access-Control-Allow-Origin", "value": "*" }, + { "name": "connection", "value": "close" }, + { "name": "content-length", "value": "2" }, + { "name": "content-type", "value": "application/json" }, + { "name": "date", "value": "Thu, 31 Aug 2023 14:55:35 GMT" }, + { "name": "server", "value": "uvicorn" } + ], + "content": { + "size": -1, + "mimeType": "application/json", + "text": "[]" + }, + "headersSize": -1, + "bodySize": -1, + "redirectURL": "" + }, + "cache": {}, + "timings": { "send": -1, "wait": -1, "receive": 0.743 } + } + ] + } + } \ No newline at end of file diff --git a/src/frontend/nginx.conf b/src/frontend/nginx.conf index df665f5b6..cabf31c7a 100644 --- a/src/frontend/nginx.conf +++ b/src/frontend/nginx.conf @@ -1,18 +1,20 @@ server { - gzip on; - gzip_comp_level 2; - gzip_min_length 1000; - gzip_types text/xml text/css; - gzip_http_version 1.1; - gzip_vary on; - gzip_disable "MSIE [4-6] \."; + gzip on; + gzip_comp_level 2; + gzip_min_length 1000; + gzip_types text/xml text/css; + gzip_http_version 1.1; + gzip_vary on; + gzip_disable "MSIE [4-6] \."; - listen 80; + listen 80; - location / { - root /usr/share/nginx/html; - index index.html index.htm; - try_files $uri $uri/ /index.html =404; - } - + location / { + root /usr/share/nginx/html; + index index.html index.htm; + try_files $uri $uri/ /index.html =404; + } + + + include /etc/nginx/extra-conf.d/*.conf; } diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json index f0a1ff7ee..2a02d8c14 100644 --- a/src/frontend/package-lock.json +++ b/src/frontend/package-lock.json @@ -8,11 +8,11 @@ "name": "langflow", "version": "0.1.2", "dependencies": { - "@emotion/react": "^11.10.5", - "@emotion/styled": "^11.10.5", - 
"@headlessui/react": "^1.7.10", - "@heroicons/react": "^2.0.15", - "@mui/material": "^5.11.9", + "@emotion/react": "^11.11.1", + "@emotion/styled": "^11.11.0", + "@headlessui/react": "^1.7.17", + "@heroicons/react": "^2.0.18", + "@mui/material": "^5.14.7", "@radix-ui/react-accordion": "^1.1.2", "@radix-ui/react-checkbox": "^1.0.4", "@radix-ui/react-dialog": "^1.0.4", @@ -29,71 +29,74 @@ "@radix-ui/react-switch": "^1.0.3", "@radix-ui/react-tabs": "^1.0.4", "@radix-ui/react-tooltip": "^1.0.6", - "@tabler/icons-react": "^2.18.0", - "@tailwindcss/forms": "^0.5.3", + "@tabler/icons-react": "^2.32.0", + "@tailwindcss/forms": "^0.5.6", "@tailwindcss/line-clamp": "^0.4.4", "@types/axios": "^0.14.0", "accordion": "^3.0.2", - "ace-builds": "^1.16.0", + "ace-builds": "^1.24.1", "add": "^2.0.6", "ansi-to-html": "^0.7.2", - "axios": "^1.3.2", + "axios": "^1.5.0", "base64-js": "^1.5.1", - "class-variance-authority": "^0.6.0", + "class-variance-authority": "^0.6.1", "clsx": "^1.2.1", - "dompurify": "^3.0.4", - "esbuild": "^0.17.18", + "dompurify": "^3.0.5", + "esbuild": "^0.17.19", "lodash": "^4.17.21", "lucide-react": "^0.233.0", + "moment": "^2.29.4", "react": "^18.2.0", "react-ace": "^10.1.0", "react-cookie": "^4.1.1", "react-dom": "^18.2.0", - "react-error-boundary": "^4.0.2", - "react-icons": "^4.8.0", + "react-error-boundary": "^4.0.11", + "react-icons": "^4.10.1", "react-laag": "^2.0.5", "react-markdown": "^8.0.7", - "react-router-dom": "^6.8.1", + "react-router-dom": "^6.15.0", "react-syntax-highlighter": "^15.5.0", - "react-tabs": "^6.0.0", - "react-tooltip": "^5.13.1", - "reactflow": "^11.5.5", - "rehype-mathjax": "^4.0.2", + "react-tabs": "^6.0.2", + "react-tooltip": "^5.21.1", + "react18-json-view": "^0.2.3", + "reactflow": "^11.8.3", + "rehype-mathjax": "^4.0.3", "remark-gfm": "^3.0.1", "remark-math": "^5.1.1", - "shadcn-ui": "^0.2.2", + "shadcn-ui": "^0.2.3", "short-unique-id": "^4.4.4", "switch": "^0.0.0", "table": "^6.8.1", - "tailwind-merge": "^1.13.0", - 
"tailwindcss-animate": "^1.0.5", + "tailwind-merge": "^1.14.0", + "tailwindcss-animate": "^1.0.7", "uuid": "^9.0.0", "vite-plugin-svgr": "^3.2.0", "web-vitals": "^2.1.4" }, "devDependencies": { + "@playwright/test": "^1.38.0", "@swc/cli": "^0.1.62", - "@swc/core": "^1.3.62", + "@swc/core": "^1.3.80", "@tailwindcss/typography": "^0.5.9", - "@testing-library/jest-dom": "^5.16.5", + "@testing-library/jest-dom": "^5.17.0", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", "@types/jest": "^27.5.2", - "@types/lodash": "^4.14.194", - "@types/node": "^16.18.12", - "@types/react": "^18.0.28", - "@types/react-dom": "^18.0.11", - "@types/uuid": "^9.0.1", - "@vitejs/plugin-react-swc": "^3.0.0", - "autoprefixer": "^10.4.14", - "daisyui": "^3.1.1", - "postcss": "^8.4.23", + "@types/lodash": "^4.14.197", + "@types/node": "^16.18.46", + "@types/react": "^18.2.21", + "@types/react-dom": "^18.2.7", + "@types/uuid": "^9.0.2", + "@vitejs/plugin-react-swc": "^3.3.2", + "autoprefixer": "^10.4.15", + "daisyui": "^3.6.3", + "postcss": "^8.4.29", "prettier": "^2.8.8", - "prettier-plugin-organize-imports": "^3.2.2", + "prettier-plugin-organize-imports": "^3.2.3", "prettier-plugin-tailwindcss": "^0.3.0", - "tailwindcss": "^3.3.2", - "typescript": "^5.0.2", - "vite": "^4.3.9" + "tailwindcss": "^3.3.3", + "typescript": "^5.2.2", + "vite": "^4.4.9" } }, "node_modules/@adobe/css-tools": { @@ -126,9 +129,9 @@ } }, "node_modules/@antfu/ni": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@antfu/ni/-/ni-0.21.5.tgz", - "integrity": "sha512-rFmuqZMFa1OTRbxdu3vmfytsy1CtsIUFH0bO85rZ1xdu2uLoioSaEi6iOULDVTQUrnes50jMs+UW355Ndj7Oxg==", + "version": "0.21.8", + "resolved": "https://registry.npmjs.org/@antfu/ni/-/ni-0.21.8.tgz", + "integrity": "sha512-90X8pU2szlvw0AJo9EZMbYc2eQKkmO7mAdC4tD4r5co2Mm56MT37MIG8EyB7p4WRheuzGxuLDxJ63mF6+Zajiw==", "bin": { "na": "bin/na.mjs", "nci": "bin/nci.mjs", @@ -140,11 +143,11 @@ } }, "node_modules/@babel/code-frame": { - 
"version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.10.tgz", - "integrity": "sha512-/KKIMG4UEL35WmI9OlvMhurwtytjvXoFcGNrOvyG9zIzA8YmPjVtIZUf7b05+TPO7G7/GEmLHDaoCgACHl9hhA==", + "version": "7.22.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", + "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", "dependencies": { - "@babel/highlight": "^7.22.10", + "@babel/highlight": "^7.22.13", "chalk": "^2.4.2" }, "engines": { @@ -216,32 +219,32 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.22.9", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.9.tgz", - "integrity": "sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.20.tgz", + "integrity": "sha512-BQYjKbpXjoXwFW5jGqiizJQQT/aC7pFm9Ok1OWssonuguICi264lbgMzRp2ZMmRSlfkX6DsWDDcsrctK8Rwfiw==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.10.tgz", - "integrity": "sha512-fTmqbbUBAwCcre6zPzNngvsI0aNrPZe77AeqvDxWM9Nm+04RrJ3CAmGHA9f7lJQY6ZMhRztNemy4uslDxTX4Qw==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.0.tgz", + "integrity": "sha512-97z/ju/Jy1rZmDxybphrBuI+jtJjFVoz7Mr9yUQVVVi+DNZE333uFQeMOqcCIy1x3WYBIbWftUSLmbNXNT7qFQ==", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.22.10", - "@babel/generator": "^7.22.10", - "@babel/helper-compilation-targets": "^7.22.10", - "@babel/helper-module-transforms": "^7.22.9", - "@babel/helpers": "^7.22.10", - "@babel/parser": "^7.22.10", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.10", - "@babel/types": "^7.22.10", - "convert-source-map": "^1.7.0", + 
"@babel/code-frame": "^7.22.13", + "@babel/generator": "^7.23.0", + "@babel/helper-compilation-targets": "^7.22.15", + "@babel/helper-module-transforms": "^7.23.0", + "@babel/helpers": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/template": "^7.22.15", + "@babel/traverse": "^7.23.0", + "@babel/types": "^7.23.0", + "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", - "json5": "^2.2.2", + "json5": "^2.2.3", "semver": "^6.3.1" }, "engines": { @@ -252,6 +255,11 @@ "url": "https://opencollective.com/babel" } }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" + }, "node_modules/@babel/core/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -261,11 +269,11 @@ } }, "node_modules/@babel/generator": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.10.tgz", - "integrity": "sha512-79KIf7YiWjjdZ81JnLujDRApWtl7BxTqWD88+FFdQEIOG8LJ0etDOM7CXuIgGJa55sGOwZVwuEsaLEm0PJ5/+A==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz", + "integrity": "sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", "dependencies": { - "@babel/types": "^7.22.10", + "@babel/types": "^7.23.0", "@jridgewell/gen-mapping": "^0.3.2", "@jridgewell/trace-mapping": "^0.3.17", "jsesc": "^2.5.1" @@ -275,12 +283,12 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.10.tgz", - "integrity": "sha512-JMSwHD4J7SLod0idLq5PKgI+6g/hLD/iuWBq08ZX49xE14VpVEojJ5rHWptpirV2j020MvypRLAXAO50igCJ5Q==", + 
"version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz", + "integrity": "sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw==", "dependencies": { "@babel/compat-data": "^7.22.9", - "@babel/helper-validator-option": "^7.22.5", + "@babel/helper-validator-option": "^7.22.15", "browserslist": "^4.21.9", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -311,20 +319,20 @@ "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" }, "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", - "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", + "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-function-name": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", - "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", + "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", "dependencies": { - "@babel/template": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/template": "^7.22.15", + "@babel/types": "^7.23.0" }, "engines": { "node": ">=6.9.0" @@ -342,26 +350,26 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": 
"7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", - "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz", + "integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", "dependencies": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.22.15" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.22.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.9.tgz", - "integrity": "sha512-t+WA2Xn5K+rTeGtC8jCsdAH52bjggG5TKRuRrAGNM/mjIbO4GxvlLMFOEz9wXY5I2XQ60PMFsAG2WIcG82dQMQ==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.23.0.tgz", + "integrity": "sha512-WhDWw1tdrlT0gMgUJSlX0IQvoO1eN279zrAUbVB+KpV2c3Tylz8+GnKOLllCS6Z/iZQEyVYxhZVUdPTqs2YYPw==", "dependencies": { - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-module-imports": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-module-imports": "^7.22.15", "@babel/helper-simple-access": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/helper-validator-identifier": "^7.22.5" + "@babel/helper-validator-identifier": "^7.22.20" }, "engines": { "node": ">=6.9.0" @@ -401,40 +409,40 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", - "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", + "version": "7.22.20", + "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", - "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.15.tgz", + "integrity": "sha512-bMn7RmyFjY/mdECUbgn9eoSY4vqvacUnS9i9vGAGttgFWesO6B4CYWA7XlpbWgBt71iv/hfbPlynohStqnu5hA==", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.10.tgz", - "integrity": "sha512-a41J4NW8HyZa1I1vAndrraTlPZ/eZoga2ZgS7fEr0tZJGVU4xqdE80CEm0CcNjha5EZ8fTBYLKHF0kqDUuAwQw==", + "version": "7.23.1", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.1.tgz", + "integrity": "sha512-chNpneuK18yW5Oxsr+t553UZzzAs3aZnFm4bxhebsNTeshrC95yA7l5yl7GBAG+JG1rF0F7zzD2EixK9mWSDoA==", "dependencies": { - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.10", - "@babel/types": "^7.22.10" + "@babel/template": "^7.22.15", + "@babel/traverse": "^7.23.0", + "@babel/types": "^7.23.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.10.tgz", - "integrity": "sha512-78aUtVcT7MUscr0K5mIEnkwxPE0MaxkR5RxRwuHaQ+JuU5AmTPhY+do2mdzVTnIJJpyBglql2pehuBIWHug+WQ==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", + "integrity": 
"sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", "dependencies": { - "@babel/helper-validator-identifier": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.20", "chalk": "^2.4.2", "js-tokens": "^4.0.0" }, @@ -507,9 +515,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.10.tgz", - "integrity": "sha512-lNbdGsQb9ekfsnjFGhEiF4hfFqGgfOP3H3d27re3n+CGhNuTSUEQdfWk556sTLNTloczcdM5TYF2LhzmDQKyvQ==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", + "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==", "bin": { "parser": "bin/babel-parser.js" }, @@ -518,9 +526,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.10.tgz", - "integrity": "sha512-21t/fkKLMZI4pqP2wlmsQAWnYW1PDyKyyUV4vCi+B25ydmdaYTKXPwCj0BzSUnZf4seIiYvSA3jcZ3gdsMFkLQ==", + "version": "7.23.1", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.1.tgz", + "integrity": "sha512-hC2v6p8ZSI/W0HUzh3V8C5g+NwSKzKPtJwSpTjwl0o297GP9+ZLQSkdvHz46CM3LqyoXxq+5G9komY+eSqSO0g==", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -529,31 +537,31 @@ } }, "node_modules/@babel/template": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", - "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", + "integrity": "sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", "dependencies": { - "@babel/code-frame": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/code-frame": "^7.22.13", + 
"@babel/parser": "^7.22.15", + "@babel/types": "^7.22.15" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.10.tgz", - "integrity": "sha512-Q/urqV4pRByiNNpb/f5OSv28ZlGJiFiiTh+GAHktbIrkPhPbl90+uW6SmpoLyZqutrg9AEaEf3Q/ZBRHBXgxig==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.0.tgz", + "integrity": "sha512-t/QaEvyIoIkwzpiZ7aoSKK8kObQYeF7T2v+dazAYCb8SXtp58zEVkWW7zAnju8FNKNdr4ScAOEDmMItbyOmEYw==", "dependencies": { - "@babel/code-frame": "^7.22.10", - "@babel/generator": "^7.22.10", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", + "@babel/code-frame": "^7.22.13", + "@babel/generator": "^7.23.0", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", "@babel/helper-hoist-variables": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.22.10", - "@babel/types": "^7.22.10", + "@babel/parser": "^7.23.0", + "@babel/types": "^7.23.0", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -562,12 +570,12 @@ } }, "node_modules/@babel/types": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.10.tgz", - "integrity": "sha512-obaoigiLrlDZ7TUQln/8m4mSqIW2QFeOrCQc9r+xsaHGNoplVNYlRVpsfE8Vj35GEm2ZH4ZhrNYogs/3fj85kg==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", + "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", "dependencies": { "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.20", "to-fast-properties": "^2.0.0" }, "engines": { @@ -1038,28 +1046,28 @@ } }, "node_modules/@floating-ui/core": { - "version": "1.4.1", - "resolved": 
"https://registry.npmjs.org/@floating-ui/core/-/core-1.4.1.tgz", - "integrity": "sha512-jk3WqquEJRlcyu7997NtR5PibI+y5bi+LS3hPmguVClypenMsCY3CBa3LAQnozRCtCrYWSEtAdiskpamuJRFOQ==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.5.0.tgz", + "integrity": "sha512-kK1h4m36DQ0UHGj5Ah4db7R0rHemTqqO0QLvUqi1/mUUp3LuAWbWxdxSIf/XsnH9VS6rRVPLJCncjRzUvyCLXg==", "dependencies": { - "@floating-ui/utils": "^0.1.1" + "@floating-ui/utils": "^0.1.3" } }, "node_modules/@floating-ui/dom": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.1.tgz", - "integrity": "sha512-KwvVcPSXg6mQygvA1TjbN/gh///36kKtllIF8SUm0qpFj8+rvYrpvlYdL1JoA71SHpDqgSSdGOSoQ0Mp3uY5aw==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.3.tgz", + "integrity": "sha512-ClAbQnEqJAKCJOEbbLo5IUlZHkNszqhuxS4fHAVxRPXPya6Ysf2G8KypnYcOTpx6I8xcgF9bbHb6g/2KpbV8qA==", "dependencies": { - "@floating-ui/core": "^1.4.1", - "@floating-ui/utils": "^0.1.1" + "@floating-ui/core": "^1.4.2", + "@floating-ui/utils": "^0.1.3" } }, "node_modules/@floating-ui/react-dom": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.1.tgz", - "integrity": "sha512-rZtAmSht4Lry6gdhAJDrCp/6rKN7++JnL1/Anbr/DdeyYXQPxvg/ivrbYvJulbRf4vL8b212suwMM2lxbv+RQA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.2.tgz", + "integrity": "sha512-5qhlDvjaLmAst/rKb3VdlCinwTF4EYMiVxuuc/HVUjs46W0zgtbMmAZ1UTsDrRTxRmUEzl92mOtWbeeXL26lSQ==", "dependencies": { - "@floating-ui/dom": "^1.3.0" + "@floating-ui/dom": "^1.5.1" }, "peerDependencies": { "react": ">=16.8.0", @@ -1067,9 +1075,9 @@ } }, "node_modules/@floating-ui/utils": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.1.tgz", - "integrity": "sha512-m0G6wlnhm/AX0H12IOWtK8gASEMffnX08RtKkCgTdHb9JpHKGloI7icFfLg9ZmQeavcvR0PKmzxClyuFPSjKWw==" + 
"version": "0.1.4", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.4.tgz", + "integrity": "sha512-qprfWkn82Iw821mcKofJ5Pk9wgioHicxcQMxx+5zt5GSKoqdWvgG5AxVmpmUUjzTLPVSH5auBrhI93Deayn/DA==" }, "node_modules/@headlessui/react": { "version": "1.7.17", @@ -1157,18 +1165,17 @@ } }, "node_modules/@mui/base": { - "version": "5.0.0-beta.11", - "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.11.tgz", - "integrity": "sha512-FdKZGPd8qmC3ZNke7CNhzcEgToc02M6WYZc9hcBsNQ17bgAd3s9F//1bDDYgMVBYxDM71V0sv/hBHlOY4I1ZVA==", + "version": "5.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.17.tgz", + "integrity": "sha512-xNbk7iOXrglNdIxFBN0k3ySsPIFLWCnFxqsAYl7CIcDkD9low4kJ7IUuy6ctwx/HAy2fenrT3KXHr1sGjAMgpQ==", "dependencies": { - "@babel/runtime": "^7.22.6", - "@emotion/is-prop-valid": "^1.2.1", + "@babel/runtime": "^7.22.15", + "@floating-ui/react-dom": "^2.0.2", "@mui/types": "^7.2.4", - "@mui/utils": "^5.14.5", + "@mui/utils": "^5.14.11", "@popperjs/core": "^2.11.8", "clsx": "^2.0.0", - "prop-types": "^15.8.1", - "react-is": "^18.2.0" + "prop-types": "^15.8.1" }, "engines": { "node": ">=12.0.0" @@ -1197,25 +1204,25 @@ } }, "node_modules/@mui/core-downloads-tracker": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.14.5.tgz", - "integrity": "sha512-+wpGH1USwPcKMFPMvXqYPC6fEvhxM3FzxC8lyDiNK/imLyyJ6y2DPb1Oue7OGIKJWBmYBqrWWtfovrxd1aJHTA==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.14.11.tgz", + "integrity": "sha512-uY8FLQURhXe3f3O4dS5OSGML9KDm9+IE226cBu78jarVIzdQGPlXwGIlSI9VJR8MvZDA6C0+6XfWDhWCHruC5Q==", "funding": { "type": "opencollective", "url": "https://opencollective.com/mui" } }, "node_modules/@mui/material": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.14.5.tgz", - "integrity": 
"sha512-4qa4GMfuZH0Ai3mttk5ccXP8a3sf7aPlAJwyMrUSz6h9hPri6BPou94zeu3rENhhmKLby9S/W1y+pmficy8JKA==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.14.11.tgz", + "integrity": "sha512-DnSdJzcR7lwG12JA5L2t8JF+RDzMygu5rCNW+logWb/KW2/TRzwLyVWO+CorHTBjBRd38DBxnwOCDiYkDd+N3A==", "dependencies": { - "@babel/runtime": "^7.22.6", - "@mui/base": "5.0.0-beta.11", - "@mui/core-downloads-tracker": "^5.14.5", - "@mui/system": "^5.14.5", + "@babel/runtime": "^7.22.15", + "@mui/base": "5.0.0-beta.17", + "@mui/core-downloads-tracker": "^5.14.11", + "@mui/system": "^5.14.11", "@mui/types": "^7.2.4", - "@mui/utils": "^5.14.5", + "@mui/utils": "^5.14.11", "@types/react-transition-group": "^4.4.6", "clsx": "^2.0.0", "csstype": "^3.1.2", @@ -1258,12 +1265,12 @@ } }, "node_modules/@mui/private-theming": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.14.5.tgz", - "integrity": "sha512-cC4C5RrpXpDaaZyH9QwmPhRLgz+f2SYbOty3cPkk4qPSOSfif2ZEcDD9HTENKDDd9deB+xkPKzzZhi8cxIx8Ig==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.14.11.tgz", + "integrity": "sha512-MSnNNzTu9pfKLCKs1ZAKwOTgE4bz+fQA0fNr8Jm7NDmuWmw0CaN9Vq2/MHsatE7+S0A25IAKby46Uv1u53rKVQ==", "dependencies": { - "@babel/runtime": "^7.22.6", - "@mui/utils": "^5.14.5", + "@babel/runtime": "^7.22.15", + "@mui/utils": "^5.14.11", "prop-types": "^15.8.1" }, "engines": { @@ -1284,11 +1291,11 @@ } }, "node_modules/@mui/styled-engine": { - "version": "5.13.2", - "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.13.2.tgz", - "integrity": "sha512-VCYCU6xVtXOrIN8lcbuPmoG+u7FYuOERG++fpY74hPpEWkyFQG97F+/XfTQVYzlR2m7nPjnwVUgATcTCMEaMvw==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.14.11.tgz", + "integrity": "sha512-jdUlqRgTYQ8RMtPX4MbRZqar6W2OiIb6J5KEFbIu4FqvPrk44Each4ppg/LAqp1qNlBYq5i+7Q10MYLMpDxX9A==", 
"dependencies": { - "@babel/runtime": "^7.21.0", + "@babel/runtime": "^7.22.15", "@emotion/cache": "^11.11.0", "csstype": "^3.1.2", "prop-types": "^15.8.1" @@ -1315,15 +1322,15 @@ } }, "node_modules/@mui/system": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.14.5.tgz", - "integrity": "sha512-mextXZHDeGcR7E1kx43TRARrVXy+gI4wzpUgNv7MqZs1dvTVXQGVeAT6ydj9d6FUqHBPMNLGV/21vJOrpqsL+w==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.14.11.tgz", + "integrity": "sha512-yl8xV+y0k7j6dzBsHabKwoShmjqLa8kTxrhUI3JpqLG358VRVMJRW/ES0HhvfcCi4IVXde+Tc2P3K1akGL8zoA==", "dependencies": { - "@babel/runtime": "^7.22.6", - "@mui/private-theming": "^5.14.5", - "@mui/styled-engine": "^5.13.2", + "@babel/runtime": "^7.22.15", + "@mui/private-theming": "^5.14.11", + "@mui/styled-engine": "^5.14.11", "@mui/types": "^7.2.4", - "@mui/utils": "^5.14.5", + "@mui/utils": "^5.14.11", "clsx": "^2.0.0", "csstype": "^3.1.2", "prop-types": "^15.8.1" @@ -1375,13 +1382,12 @@ } }, "node_modules/@mui/utils": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.14.5.tgz", - "integrity": "sha512-6Hzw63VR9C5xYv+CbjndoRLU6Gntal8rJ5W+GUzkyHrGWIyYPWZPa6AevnyGioySNETATe1H9oXS8f/7qgIHJA==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.14.11.tgz", + "integrity": "sha512-fmkIiCPKyDssYrJ5qk+dime1nlO3dmWfCtaPY/uVBqCRMBZ11JhddB9m8sjI2mgqQQwRJG5bq3biaosNdU/s4Q==", "dependencies": { - "@babel/runtime": "^7.22.6", + "@babel/runtime": "^7.22.15", "@types/prop-types": "^15.7.5", - "@types/react-is": "^18.2.1", "prop-types": "^15.8.1", "react-is": "^18.2.0" }, @@ -1393,7 +1399,13 @@ "url": "https://opencollective.com/mui" }, "peerDependencies": { + "@types/react": "^17.0.0 || ^18.0.0", "react": "^17.0.0 || ^18.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } } }, "node_modules/@nodelib/fs.scandir": { @@ -1428,6 +1440,21 @@ "node": 
">= 8" } }, + "node_modules/@playwright/test": { + "version": "1.38.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.38.1.tgz", + "integrity": "sha512-NqRp8XMwj3AK+zKLbZShl0r/9wKgzqI/527bkptKXomtuo+dOjU9NdMASQ8DNC9z9zLOMbG53T4eihYr3XR+BQ==", + "dev": true, + "dependencies": { + "playwright": "1.38.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, "node_modules/@popperjs/core": { "version": "2.11.8", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", @@ -1628,19 +1655,19 @@ } }, "node_modules/@radix-ui/react-dialog": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.4.tgz", - "integrity": "sha512-hJtRy/jPULGQZceSAP2Re6/4NpKo8im6V8P2hUqZsdFiSL8l35kYsw3qbRI6Ay5mQd2+wlLqje770eq+RJ3yZg==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.5.tgz", + "integrity": "sha512-GjWJX/AUpB703eEBanuBnIWdIXg6NvJFCXcNlSZk4xdszCdhrJgBoUd1cGk67vFO+WdA2pfI/plOpqz/5GUP6Q==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", @@ -1681,9 +1708,9 @@ } }, "node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz", - "integrity": "sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg==", + "version": 
"1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz", + "integrity": "sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -1708,16 +1735,16 @@ } }, "node_modules/@radix-ui/react-dropdown-menu": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.0.5.tgz", - "integrity": "sha512-xdOrZzOTocqqkCkYo8yRPCib5OkTkqN7lqNCdxwPOdE466DOaNl4N8PkUIlsXthQvW5Wwkd+aEmWpfWlBoDPEw==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.0.6.tgz", + "integrity": "sha512-i6TuFOoWmLWq+M/eCLGd/bQ2HfAX1RJgvrBQ6AQLmzfvsLdefxbWu8G9zczcPFfcSPehz9GcpF6K9QYreFV8hA==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-menu": "2.0.5", + "@radix-ui/react-menu": "2.0.6", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-use-controllable-state": "1.0.1" }, @@ -1754,9 +1781,9 @@ } }, "node_modules/@radix-ui/react-focus-scope": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz", - "integrity": "sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz", + "integrity": "sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/react-compose-refs": "1.0.1", @@ -1856,9 +1883,9 @@ } }, "node_modules/@radix-ui/react-menu": { - "version": "2.0.5", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.0.5.tgz", - "integrity": "sha512-Gw4f9pwdH+w5w+49k0gLjN0PfRDHvxmAgG16AbyJZ7zhwZ6PBHKtWohvnSwfusfnK3L68dpBREHpVkj8wEM7ZA==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.0.6.tgz", + "integrity": "sha512-BVkFLS+bUC8HcImkRKPSiVumA1VPOOEC5WBMiT+QAVsPzW1FJzI9KnqgGxVDPBcql5xXrHkD3JOVoXWEXD8SYA==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -1866,12 +1893,12 @@ "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-roving-focus": "1.0.4", @@ -1896,9 +1923,9 @@ } }, "node_modules/@radix-ui/react-menubar": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menubar/-/react-menubar-1.0.3.tgz", - "integrity": "sha512-GqjdxzYCjjKhcgEODDP8SrYfbWNh/Hm3lyuFkP5Q5IbX0QfXklLF1o1AqA3oTV2kulUgN/kOZVS92hIIShEgpA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menubar/-/react-menubar-1.0.4.tgz", + "integrity": "sha512-bHgUo9gayKZfaQcWSSLr++LyS0rgh+MvD89DE4fJ6TkGHvjHgPaBZf44hdka7ogOxIOdj9163J+5xL2Dn4qzzg==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -1907,7 +1934,7 @@ "@radix-ui/react-context": "1.0.1", "@radix-ui/react-direction": "1.0.1", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-menu": "2.0.5", + "@radix-ui/react-menu": "2.0.6", "@radix-ui/react-primitive": "1.0.3", 
"@radix-ui/react-roving-focus": "1.0.4", "@radix-ui/react-use-controllable-state": "1.0.1" @@ -1928,20 +1955,20 @@ } }, "node_modules/@radix-ui/react-popover": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.6.tgz", - "integrity": "sha512-cZ4defGpkZ0qTRtlIBzJLSzL6ht7ofhhW4i1+pkemjV1IKXm0wgCRnee154qlV6r9Ttunmh2TNZhMfV2bavUyA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz", + "integrity": "sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", @@ -1965,9 +1992,9 @@ } }, "node_modules/@radix-ui/react-popper": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz", - "integrity": "sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz", + "integrity": "sha512-cKpopj/5RHZWjrbF2846jBNacjQVwkP068DfmgrNJXpvVWrOvlAmE9xSiy5OqeE+Gi8D9fP+oDhUnPqNMY8/5w==", "dependencies": { "@babel/runtime": "^7.13.10", "@floating-ui/react-dom": "^2.0.0", @@ -1997,9 +2024,9 @@ } }, "node_modules/@radix-ui/react-portal": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz", - "integrity": "sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.4.tgz", + "integrity": "sha512-Qki+C/EuGUVCQTOTD5vzJzJuMUlewbzuKyUy+/iHM2uwGiru9gZeBJtHAPKAEkB5KWGi9mP/CHKcY0wt1aW45Q==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/react-primitive": "1.0.3" @@ -2164,6 +2191,113 @@ } } }, + "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz", + "integrity": "sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-escape-keydown": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-focus-scope": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz", + "integrity": "sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + 
"@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-popper": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz", + "integrity": "sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-layout-effect": "1.0.1", + "@radix-ui/react-use-rect": "1.0.1", + "@radix-ui/react-use-size": "1.0.1", + "@radix-ui/rect": "1.0.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select/node_modules/@radix-ui/react-portal": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz", + "integrity": "sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA==", + "dependencies": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0", + "react-dom": "^16.8 || ^17.0 || ^18.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, 
"node_modules/@radix-ui/react-separator": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.0.3.tgz", @@ -2265,18 +2399,18 @@ } }, "node_modules/@radix-ui/react-tooltip": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.6.tgz", - "integrity": "sha512-DmNFOiwEc2UDigsYj6clJENma58OelxD24O4IODoZ+3sQc3Zb+L8w1EP+y9laTuKCLAysPw4fD6/v0j4KNV8rg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.7.tgz", + "integrity": "sha512-lPh5iKNFVQ/jav/j6ZrWq3blfDJ0OH9R6FlNUHPMqdLuQ9vwDgFsRxvl8b7Asuy5c8xmoojHUxKHQSOAvMHxyw==", "dependencies": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", @@ -2453,11 +2587,11 @@ } }, "node_modules/@reactflow/background": { - "version": "11.2.7", - "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.2.7.tgz", - "integrity": "sha512-cI9nE4B/1BCASbFpCsyjy45qOWBCnog0rsDqI0Vrv41rDj4MVYHZ2ZegyvgdPPQluoGB9fwwVtRQA4m1I9RSSw==", + "version": "11.3.1", + "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.3.1.tgz", + "integrity": "sha512-FjYHXvzwwaCJJS5rRxXbU4r8xbAqEvLQkO1l4czjWgrE0VQELC93RKVI4lRpkv2RkkIwRNMvoGb6kXNbJWiU/g==", "dependencies": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.3", "zustand": "^4.4.1" }, @@ -2467,11 +2601,11 @@ } }, "node_modules/@reactflow/controls": { - "version": "11.1.18", - "resolved": 
"https://registry.npmjs.org/@reactflow/controls/-/controls-11.1.18.tgz", - "integrity": "sha512-sBfS7mZyyEzJNhCu0Zg4PDSoYI6XzWybyVRE0n2YKTwpXxE2Yy7eag35mloHjbDV+knNZRjsXl87bALmq08AoA==", + "version": "11.2.1", + "resolved": "https://registry.npmjs.org/@reactflow/controls/-/controls-11.2.1.tgz", + "integrity": "sha512-mW8kz/J77tc5uRwSTSUH1OtkksaVc52Dbz7aTuF0aKZXwPHoAfRPFeet2VLQV1IdKeSab82Mqy0WTSJj6Hl15w==", "dependencies": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.3", "zustand": "^4.4.1" }, @@ -2481,9 +2615,9 @@ } }, "node_modules/@reactflow/core": { - "version": "11.8.2", - "resolved": "https://registry.npmjs.org/@reactflow/core/-/core-11.8.2.tgz", - "integrity": "sha512-Wywa5jDlojorOkET+PxLqlFnIt0PrnbYqrfQEu4I9+ZWII0TWp84YPxF5j63Cd+92vTWAzgFrVMVNwda6nU/rg==", + "version": "11.9.1", + "resolved": "https://registry.npmjs.org/@reactflow/core/-/core-11.9.1.tgz", + "integrity": "sha512-qXWwB2KSWkp3MH3fhY1HL4yFBvMZ+7y9mkfk0we7wCuGNgQzWVJDwvVEpPTkyp5xyxX53sSF3UDC+UaFCB4ElA==", "dependencies": { "@types/d3": "^7.4.0", "@types/d3-drag": "^3.0.1", @@ -2501,11 +2635,11 @@ } }, "node_modules/@reactflow/minimap": { - "version": "11.6.2", - "resolved": "https://registry.npmjs.org/@reactflow/minimap/-/minimap-11.6.2.tgz", - "integrity": "sha512-rJvIDv6NkfNgHEEr+znGVo5gVhgkXNImzo5FNCKenMQI4x8BSGxQGaADfr+f/3KOmMq9PpvjEHsrLqB0VdPaCg==", + "version": "11.7.1", + "resolved": "https://registry.npmjs.org/@reactflow/minimap/-/minimap-11.7.1.tgz", + "integrity": "sha512-ZNHx1wURS055kYYaiFhrmGYI2UXA7Cwcjxfww4Mq3PWvIIaEFL9t34aUWoXCYm+NPFHElwC8jM7Vyp7xDR9oyg==", "dependencies": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "@types/d3-selection": "^3.0.3", "@types/d3-zoom": "^3.0.1", "classcat": "^5.0.3", @@ -2519,11 +2653,11 @@ } }, "node_modules/@reactflow/node-resizer": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@reactflow/node-resizer/-/node-resizer-2.1.4.tgz", - "integrity": 
"sha512-9oNnNvlylnsyBBvZQITw1MyMZfYWHhu+GkqboIsrgOdaYwURkuRfhMFaQb+j0rCSjoq6AhbJbGWkZiuonAgELA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@reactflow/node-resizer/-/node-resizer-2.2.1.tgz", + "integrity": "sha512-nUh73WHVAYsJLvgSKqKAxIPU9vUkEM5iold2zQsEOHqjfnk86m1r33fTEiw5+MPqCFHMoGzyZe2gxXUMFqg/3A==", "dependencies": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.4", "d3-drag": "^3.0.0", "d3-selection": "^3.0.0", @@ -2535,11 +2669,11 @@ } }, "node_modules/@reactflow/node-toolbar": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@reactflow/node-toolbar/-/node-toolbar-1.2.6.tgz", - "integrity": "sha512-f+zKVS30XOQStXj+Ry7Yf4FlIB1XWHhrRNG2f2pNUH6FTpDBOvbxiIsQh99LH9vykMJ54fRJwbAjTg24x0UnyA==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@reactflow/node-toolbar/-/node-toolbar-1.3.1.tgz", + "integrity": "sha512-YkrpB2+/9ymZ2h1nmuKytmrFlIXdiDzeDqmZnyJF2UNH/4sSo/GswPJmqiA8ETOP2oEdIkNmO9dwVpiDvWtZ5A==", "dependencies": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.3", "zustand": "^4.4.1" }, @@ -2549,17 +2683,17 @@ } }, "node_modules/@remix-run/router": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.8.0.tgz", - "integrity": "sha512-mrfKqIHnSZRyIzBcanNJmVQELTnX+qagEDlcKO90RgRBVOZGSGvZKeDihTRfWcqoDn5N/NkUcwWTccnpN18Tfg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.9.0.tgz", + "integrity": "sha512-bV63itrKBC0zdT27qYm6SDZHlkXwFL1xMBuhkn+X7l0+IIhNaH5wuuvZKp6eKhCD4KFhujhfhCT1YxXW6esUIA==", "engines": { "node": ">=14.0.0" } }, "node_modules/@rollup/pluginutils": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.3.tgz", - "integrity": "sha512-hfllNN4a80rwNQ9QCxhxuHCGHMAvabXqxNdaChUSSadMre7t4iEUI6fFAhBOn/eIYTgYVhBv7vCLsAJ4u3lf3g==", + "version": "5.0.4", + "resolved": 
"https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.4.tgz", + "integrity": "sha512-0KJnIoRI8A+a1dqOYLxH8vBf8bphDmty5QvIm2hqm7oFCFYKCAZWWd2hXgMibaPsNDhI0AtpYfQZJG47pt/k4g==", "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", @@ -2590,9 +2724,9 @@ } }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-7.0.0.tgz", - "integrity": "sha512-khWbXesWIP9v8HuKCl2NU2HNAyqpSQ/vkIl36Nbn4HIwEYSRWL0H7Gs6idJdha2DkpFDWlsqMELvoCE8lfFY6Q==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz", + "integrity": "sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==", "engines": { "node": ">=14" }, @@ -2605,9 +2739,9 @@ } }, "node_modules/@svgr/babel-plugin-remove-jsx-attribute": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-7.0.0.tgz", - "integrity": "sha512-iiZaIvb3H/c7d3TH2HBeK91uI2rMhZNwnsIrvd7ZwGLkFw6mmunOCoVnjdYua662MqGFxlN9xTq4fv9hgR4VXQ==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz", + "integrity": "sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA==", "engines": { "node": ">=14" }, @@ -2620,9 +2754,9 @@ } }, "node_modules/@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-7.0.0.tgz", - "integrity": "sha512-sQQmyo+qegBx8DfFc04PFmIO1FP1MHI1/QEpzcIcclo5OAISsOJPW76ZIs0bDyO/DBSJEa/tDa1W26pVtt0FRw==", + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz", + "integrity": "sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA==", "engines": { "node": ">=14" }, @@ -2635,9 +2769,9 @@ } }, "node_modules/@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-7.0.0.tgz", - "integrity": "sha512-i6MaAqIZXDOJeikJuzocByBf8zO+meLwfQ/qMHIjCcvpnfvWf82PFvredEZElErB5glQFJa2KVKk8N2xV6tRRA==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz", + "integrity": "sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ==", "engines": { "node": ">=14" }, @@ -2650,9 +2784,9 @@ } }, "node_modules/@svgr/babel-plugin-svg-dynamic-title": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-7.0.0.tgz", - "integrity": "sha512-BoVSh6ge3SLLpKC0pmmN9DFlqgFy4NxNgdZNLPNJWBUU7TQpDWeBuyVuDW88iXydb5Cv0ReC+ffa5h3VrKfk1w==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz", + "integrity": "sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og==", "engines": { "node": ">=14" }, @@ -2665,9 +2799,9 @@ } }, "node_modules/@svgr/babel-plugin-svg-em-dimensions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-7.0.0.tgz", - "integrity": "sha512-tNDcBa+hYn0gO+GkP/AuNKdVtMufVhU9fdzu+vUQsR18RIJ9RWe7h/pSBY338RO08wArntwbDk5WhQBmhf2PaA==", + "version": "8.0.0", + "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz", + "integrity": "sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g==", "engines": { "node": ">=14" }, @@ -2680,9 +2814,9 @@ } }, "node_modules/@svgr/babel-plugin-transform-react-native-svg": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-7.0.0.tgz", - "integrity": "sha512-qw54u8ljCJYL2KtBOjI5z7Nzg8LnSvQOP5hPKj77H4VQL4+HdKbAT5pnkkZLmHKYwzsIHSYKXxHouD8zZamCFQ==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz", + "integrity": "sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q==", "engines": { "node": ">=14" }, @@ -2695,9 +2829,9 @@ } }, "node_modules/@svgr/babel-plugin-transform-svg-component": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-7.0.0.tgz", - "integrity": "sha512-CcFECkDj98daOg9jE3Bh3uyD9kzevCAnZ+UtzG6+BQG/jOQ2OA3jHnX6iG4G1MCJkUQFnUvEv33NvQfqrb/F3A==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz", + "integrity": "sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw==", "engines": { "node": ">=12" }, @@ -2710,18 +2844,18 @@ } }, "node_modules/@svgr/babel-preset": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-7.0.0.tgz", - "integrity": "sha512-EX/NHeFa30j5UjldQGVQikuuQNHUdGmbh9kEpBKofGUtF0GUPJ4T4rhoYiqDAOmBOxojyot36JIFiDUHUK1ilQ==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-8.1.0.tgz", + 
"integrity": "sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug==", "dependencies": { - "@svgr/babel-plugin-add-jsx-attribute": "^7.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^7.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^7.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^7.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "^7.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "^7.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "^7.0.0", - "@svgr/babel-plugin-transform-svg-component": "^7.0.0" + "@svgr/babel-plugin-add-jsx-attribute": "8.0.0", + "@svgr/babel-plugin-remove-jsx-attribute": "8.0.0", + "@svgr/babel-plugin-remove-jsx-empty-expression": "8.0.0", + "@svgr/babel-plugin-replace-jsx-attribute-value": "8.0.0", + "@svgr/babel-plugin-svg-dynamic-title": "8.0.0", + "@svgr/babel-plugin-svg-em-dimensions": "8.0.0", + "@svgr/babel-plugin-transform-react-native-svg": "8.1.0", + "@svgr/babel-plugin-transform-svg-component": "8.0.0" }, "engines": { "node": ">=14" @@ -2735,14 +2869,15 @@ } }, "node_modules/@svgr/core": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/core/-/core-7.0.0.tgz", - "integrity": "sha512-ztAoxkaKhRVloa3XydohgQQCb0/8x9T63yXovpmHzKMkHO6pkjdsIAWKOS4bE95P/2quVh1NtjSKlMRNzSBffw==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/core/-/core-8.1.0.tgz", + "integrity": "sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==", "dependencies": { "@babel/core": "^7.21.3", - "@svgr/babel-preset": "^7.0.0", + "@svgr/babel-preset": "8.1.0", "camelcase": "^6.2.0", - "cosmiconfig": "^8.1.3" + "cosmiconfig": "^8.1.3", + "snake-case": "^3.0.4" }, "engines": { "node": ">=14" @@ -2753,13 +2888,13 @@ } }, "node_modules/@svgr/core/node_modules/cosmiconfig": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.2.0.tgz", - "integrity": 
"sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==", + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "dependencies": { - "import-fresh": "^3.2.1", + "import-fresh": "^3.3.0", "js-yaml": "^4.1.0", - "parse-json": "^5.0.0", + "parse-json": "^5.2.0", "path-type": "^4.0.0" }, "engines": { @@ -2767,12 +2902,20 @@ }, "funding": { "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, "node_modules/@svgr/hast-util-to-babel-ast": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-7.0.0.tgz", - "integrity": "sha512-42Ej9sDDEmsJKjrfQ1PHmiDiHagh/u9AHO9QWbeNx4KmD9yS5d1XHmXUNINfUcykAU+4431Cn+k6Vn5mWBYimQ==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz", + "integrity": "sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q==", "dependencies": { "@babel/types": "^7.21.3", "entities": "^4.4.0" @@ -2797,13 +2940,13 @@ } }, "node_modules/@svgr/plugin-jsx": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-7.0.0.tgz", - "integrity": "sha512-SWlTpPQmBUtLKxXWgpv8syzqIU8XgFRvyhfkam2So8b3BE0OS0HPe5UfmlJ2KIC+a7dpuuYovPR2WAQuSyMoPw==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz", + "integrity": "sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA==", "dependencies": { "@babel/core": "^7.21.3", - "@svgr/babel-preset": "^7.0.0", - "@svgr/hast-util-to-babel-ast": "^7.0.0", + "@svgr/babel-preset": "8.1.0", + 
"@svgr/hast-util-to-babel-ast": "8.0.0", "svg-parser": "^2.0.4" }, "engines": { @@ -2812,6 +2955,9 @@ "funding": { "type": "github", "url": "https://github.com/sponsors/gregberge" + }, + "peerDependencies": { + "@svgr/core": "*" } }, "node_modules/@swc/cli": { @@ -2855,11 +3001,15 @@ } }, "node_modules/@swc/core": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.77.tgz", - "integrity": "sha512-CiLD2NGTdhE8JnWFHeRAglaCAcvwOxvpeWNtCIT261GrxTKCXHPAn4eqIWiBzXnwWDmZ6XdyrCL4/GmPESNnrg==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.89.tgz", + "integrity": "sha512-+FchWateF57g50ChX6++QQDwgVd6iWZX5HA6m9LRIdJIB56bIqbwRQDwVL3Q8Rlbry4kmw+RxiOW2FjAx9mQOQ==", "dev": true, "hasInstallScript": true, + "dependencies": { + "@swc/counter": "^0.1.1", + "@swc/types": "^0.1.5" + }, "engines": { "node": ">=10" }, @@ -2868,16 +3018,16 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.3.77", - "@swc/core-darwin-x64": "1.3.77", - "@swc/core-linux-arm-gnueabihf": "1.3.77", - "@swc/core-linux-arm64-gnu": "1.3.77", - "@swc/core-linux-arm64-musl": "1.3.77", - "@swc/core-linux-x64-gnu": "1.3.77", - "@swc/core-linux-x64-musl": "1.3.77", - "@swc/core-win32-arm64-msvc": "1.3.77", - "@swc/core-win32-ia32-msvc": "1.3.77", - "@swc/core-win32-x64-msvc": "1.3.77" + "@swc/core-darwin-arm64": "1.3.89", + "@swc/core-darwin-x64": "1.3.89", + "@swc/core-linux-arm-gnueabihf": "1.3.89", + "@swc/core-linux-arm64-gnu": "1.3.89", + "@swc/core-linux-arm64-musl": "1.3.89", + "@swc/core-linux-x64-gnu": "1.3.89", + "@swc/core-linux-x64-musl": "1.3.89", + "@swc/core-win32-arm64-msvc": "1.3.89", + "@swc/core-win32-ia32-msvc": "1.3.89", + "@swc/core-win32-x64-msvc": "1.3.89" }, "peerDependencies": { "@swc/helpers": "^0.5.0" @@ -2889,9 +3039,9 @@ } }, "node_modules/@swc/core-darwin-arm64": { - "version": "1.3.77", - "resolved": 
"https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.77.tgz", - "integrity": "sha512-l4KGQAGB4Ih1Al2tWoUBrtVJCF/xZRjH3jCMCRD52KZDRAnRVDq42JKek7+aHjjH8juzTISaqzsI8Ipv6zvKhA==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.89.tgz", + "integrity": "sha512-LVCZQ2yGrX2678uMvW66IF1bzcOMqiABi+ioNDnJtAIsE/zRVMEYp1ivbOrH32FmPplBby6CGgJIOT3P4VaP1g==", "cpu": [ "arm64" ], @@ -2905,9 +3055,9 @@ } }, "node_modules/@swc/core-darwin-x64": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.77.tgz", - "integrity": "sha512-eFCkZg/BzObOn5IWn7t/Ywz+jlZKff/1XBymT7Arh/UkO39Agh+rYdBqjbylp4JQMl0qGRBfxD3wPgDRoViNVQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.89.tgz", + "integrity": "sha512-IwKlX65YrPBF3urOxBJia0PjnZeaICnCkSwGLiYyV1RhM8XwZ/XyEDTBEsdph3WxUM5wCZQSk8UY/d0saIsX9w==", "cpu": [ "x64" ], @@ -2921,9 +3071,9 @@ } }, "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.77.tgz", - "integrity": "sha512-+1BueyGcCQAtxSORJml0CU8aKQNssQ5E3ABMFJwCbcec+lUCiGYK1fBfqj4FmWQMbXuQ+mn1SMeXSZAtaXoQ3w==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.89.tgz", + "integrity": "sha512-u5qAPh7NkKoDJYwfaB5zuRvzW2+A89CQQHp5xcYjpctRsk3sUrPmC7vNeE12xipBNKLujIG59ppbrf6Pkp5XIg==", "cpu": [ "arm" ], @@ -2937,9 +3087,9 @@ } }, "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.77.tgz", - "integrity": "sha512-3smbzVcuuCiWWPFeUIp1c0aAXd+fGsc8x8rUcYvoJAWBgLJ45JymOI5WSUjIybl3rk0prdkbFylZuR0t1Rue3A==", + "version": "1.3.89", + "resolved": 
"https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.89.tgz", + "integrity": "sha512-eykuO7XtPltk600HvnnRr1nU5qGk7PeqLmztHA7R2bu2SbtcbCGsewPNcAX5eP8by2VwpGcLPdxaKyqeUwCgoA==", "cpu": [ "arm64" ], @@ -2953,9 +3103,9 @@ } }, "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.77.tgz", - "integrity": "sha512-e81+i4ef5vDeu9AkMY2AamPcmtPVPUqeqq3aNWM1tcHCaUej1DwY4xhRxrd1OvEoYyVBLtiMb5nenF3V9OzXIQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.89.tgz", + "integrity": "sha512-i/65Vt3ljfd6EyR+WWZ5aAjZLTQMIHoR+Ay97jE0kysRn8MEOINu0SWyiEwcdXzRGlt+zkrKYfOxp745sWPDAw==", "cpu": [ "arm64" ], @@ -2969,9 +3119,9 @@ } }, "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.77.tgz", - "integrity": "sha512-gl3+9VESckZ/GYCmGClGgXqB2tAA2MivEV/51Wde+2alo2lPSSujEhxE6Q3TNYkXOLAHSupYyDZ0ou9RfXufOw==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.89.tgz", + "integrity": "sha512-ERETXe68CJRdNkL3EIN62gErh3p6+/6hmz4C0epnYJ4F7QspdW/EOluL1o9bl4dux4Xz0nmBPSZsqfHq/nl1KA==", "cpu": [ "x64" ], @@ -2985,9 +3135,9 @@ } }, "node_modules/@swc/core-linux-x64-musl": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.77.tgz", - "integrity": "sha512-AqQLZAMYTaNrA4i/Nv/GhXdildDZyRv6xsK8u2actevv5PPjD/69yYB3Z4uaptwh/4ys4W/Y2vnt+OPCNH4OQg==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.89.tgz", + "integrity": "sha512-EXiwgU5E/yC5zuJtOXXWv+wMwpe5DR380XhVxIOBG6nFi6MR3O2X37KxeEdQZX8RwN7/KU6kNHeifzEiSvixfA==", "cpu": [ "x64" ], @@ -3001,9 +3151,9 @@ } }, "node_modules/@swc/core-win32-arm64-msvc": 
{ - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.77.tgz", - "integrity": "sha512-Wdw++6w7WyavxZ3WruElCrRJ6EO0iHS0Mts4qHnbKgD08GJqIMTZPtZ5qhRe9zCf6sj2rQqhAMf/HKhYrHoF+w==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.89.tgz", + "integrity": "sha512-j7GvkgeOrZlB55MpEwX+6E6KjxwOmwRXpIqMjF11JDIZ0wEwHlBxZhlnQQ58iuI6jL6AJgDH/ktDhMyELoBiHw==", "cpu": [ "arm64" ], @@ -3017,9 +3167,9 @@ } }, "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.77.tgz", - "integrity": "sha512-ObNVpdtLdXDpmVKuMZh87yBYL4ti64WX95o2j5Oq3r0e0RqwIGqGvPDxvJVEiyCnaXHfl8eSNKWuiOxPHPkMNQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.89.tgz", + "integrity": "sha512-n57nE7d3FXBa3Y2+VoJdPulcUAS0ZGAGVGxFpeM/tZt1MBEN5OvpOSOIp35dK5HAAxAzTPlmqj9KUYnVxLMVKw==", "cpu": [ "ia32" ], @@ -3033,9 +3183,9 @@ } }, "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.77.tgz", - "integrity": "sha512-Ew6jg/qr0v/2ixeJXvIUBuAPMKTz8HRoDBO/nHkvlnDFmkhsyH7h5YwJS1rLBwAEhWuJaVYjYi7cibZTI/QRYQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.89.tgz", + "integrity": "sha512-6yMAmqgseAwEXFIwurP7CL8yIH8n7/Rg62ooOVSLSWL5O/Pwlpy1WrpoA0eKhgMLLkIrPvNuKaE/rG7c2iNQHA==", "cpu": [ "x64" ], @@ -3048,6 +3198,18 @@ "node": ">=10" } }, + "node_modules/@swc/counter": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.1.tgz", + "integrity": "sha512-xVRaR4u9hcYjFvcSg71Lz5Bo4//CyjAAfMxa7UsaDSYxAshflUkVJWiyVWrfxC59z2kP1IzI4/1BEpnhI9o3Mw==", + "dev": true + }, + "node_modules/@swc/types": { + "version": 
"0.1.5", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", + "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", + "dev": true + }, "node_modules/@szmarczak/http-timer": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", @@ -3061,20 +3223,20 @@ } }, "node_modules/@tabler/icons": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-2.30.0.tgz", - "integrity": "sha512-tvtmkI4ALjKThVVORh++sB9JnkFY7eGInKxNy+Df7WVQiF7T85tlvGADzlgX4Ic+CK5MIUzZ0jhOlQ/RRlgXpg==", + "version": "2.35.0", + "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-2.35.0.tgz", + "integrity": "sha512-qW/itKdmFvfGw6mAQ+cZy+2MYTXb0XdGAVhO3obYLJEfsSPMwQRO0S9ckFk1xMQX/Tj7REC3TEmWUBWNi3/o3g==", "funding": { "type": "github", "url": "https://github.com/sponsors/codecalm" } }, "node_modules/@tabler/icons-react": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/@tabler/icons-react/-/icons-react-2.30.0.tgz", - "integrity": "sha512-aYggXusHW133L4KujJkVf4GIIrjg7tIRHgNf/n37mnoHqMjwNP+PjmVdrBM1Z8Ywx9PKFRlrwM0eUMDcG+I4HA==", + "version": "2.35.0", + "resolved": "https://registry.npmjs.org/@tabler/icons-react/-/icons-react-2.35.0.tgz", + "integrity": "sha512-jK2zgtMF2+LSjtbjYsfer+ryz72TwyJqv3MsmCfEpQwP37u01xvmTlVhJ+ox3bV+trsgjojsldVDuB05JuXLaw==", "dependencies": { - "@tabler/icons": "2.30.0", + "@tabler/icons": "2.35.0", "prop-types": "^15.7.2" }, "funding": { @@ -3086,9 +3248,9 @@ } }, "node_modules/@tailwindcss/forms": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.4.tgz", - "integrity": "sha512-YAm12D3R7/9Mh4jFbYSMnsd6jG++8KxogWgqs7hbdo/86aWjjlIEvL7+QYdVELmAI0InXTpZqFIg5e7aDVWI2Q==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.6.tgz", + "integrity": 
"sha512-Fw+2BJ0tmAwK/w01tEFL5TiaJBX1NLT1/YbWgvm7ws3Qcn11kiXxzNTEQDMs5V3mQemhB56l3u0i9dwdzSQldA==", "dependencies": { "mini-svg-data-uri": "^1.2.3" }, @@ -3105,9 +3267,9 @@ } }, "node_modules/@tailwindcss/typography": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.9.tgz", - "integrity": "sha512-t8Sg3DyynFysV9f4JDOVISGsjazNb48AeIYQwcL+Bsq5uf4RYL75C1giZ43KISjeDGBaTN3Kxh7Xj/vRSMJUUg==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.10.tgz", + "integrity": "sha512-Pe8BuPJQJd3FfRnm6H0ulKIGoMEQS+Vq01R6M5aCrFB/ccR/shT+0kXLjouGC1gFLm9hopTFN+DMP0pfwRWzPw==", "dev": true, "dependencies": { "lodash.castarray": "^4.4.0", @@ -3120,9 +3282,9 @@ } }, "node_modules/@testing-library/dom": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.1.tgz", - "integrity": "sha512-0DGPd9AR3+iDTjGoMpxIkAsUihHZ3Ai6CneU6bRRrffXMgzCdlNk43jTrD2/5LT6CBb3MWTP8v510JzYtahD2w==", + "version": "9.3.3", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz", + "integrity": "sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw==", "dev": true, "peer": true, "dependencies": { @@ -3275,9 +3437,9 @@ } }, "node_modules/@types/aria-query": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.1.tgz", - "integrity": "sha512-XTIieEY+gvJ39ChLcB4If5zHtPxt3Syj5rgZR+e1ctpmK8NjPf0zFqsz4JpLJT0xla9GFDKjy8Cpu331nrmE1Q==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.2.tgz", + "integrity": "sha512-PHKZuMN+K5qgKIWhBodXzQslTo5P+K/6LqeKXS6O/4liIDdZqaX5RXrCK++LAw+y/nptN48YmUMFiQHRSWYwtQ==", "dev": true }, "node_modules/@types/axios": { @@ -3307,9 +3469,9 @@ "integrity": "sha512-LKVP3cgXBT9RYj+t+9FDKwS5tdI+rPBXaNSkma7hvqy35lc7mAokC2zsqWJH0LaqIt3B962nuYI77hsJoT1gow==" }, "node_modules/@types/d3": { - 
"version": "7.4.0", - "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.0.tgz", - "integrity": "sha512-jIfNVK0ZlxcuRDKtRS/SypEyOQ6UHaFQBKv032X45VvxSJ6Yi5G9behy9h6tNTHTDGh5Vq+KbmBjUWLgY4meCA==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.1.tgz", + "integrity": "sha512-lBpYmbHTCtFKO1DB1R7E9dXp9/g1F3JXSGOF7iKPZ+wRmYg/Q6tCRHODGOc5Qk25fJRe2PI60EDRf2HLPUncMA==", "dependencies": { "@types/d3-array": "*", "@types/d3-axis": "*", @@ -3344,67 +3506,67 @@ } }, "node_modules/@types/d3-array": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.0.5.tgz", - "integrity": "sha512-Qk7fpJ6qFp+26VeQ47WY0mkwXaiq8+76RJcncDEfMc2ocRzXLO67bLFRNI4OX1aGBoPzsM5Y2T+/m1pldOgD+A==" + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.0.8.tgz", + "integrity": "sha512-2xAVyAUgaXHX9fubjcCbGAUOqYfRJN1em1EKR2HfzWBpObZhwfnZKvofTN4TplMqJdFQao61I+NVSai/vnBvDQ==" }, "node_modules/@types/d3-axis": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.2.tgz", - "integrity": "sha512-uGC7DBh0TZrU/LY43Fd8Qr+2ja1FKmH07q2FoZFHo1eYl8aj87GhfVoY1saJVJiq24rp1+wpI6BvQJMKgQm8oA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.4.tgz", + "integrity": "sha512-ySnjI/7qm+J602VjcejXcqs1hEuu5UBbGaJGp+Cn/yKVc1iS3JueLVpToGdQsS2sqta7tqA/kG4ore/+LH90UA==", "dependencies": { "@types/d3-selection": "*" } }, "node_modules/@types/d3-brush": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.2.tgz", - "integrity": "sha512-2TEm8KzUG3N7z0TrSKPmbxByBx54M+S9lHoP2J55QuLU0VSQ9mE96EJSAOVNEqd1bbynMjeTS9VHmz8/bSw8rA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.4.tgz", + "integrity": "sha512-Kg5uIsdJNMCs5lTqeZFsTKqj9lBvpiFRDkYN3j2CDlPhonNDg9/gXVpv1E/MKh3tEqArryIj9o6RBGE/MQe+6Q==", "dependencies": { "@types/d3-selection": "*" } 
}, "node_modules/@types/d3-chord": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.2.tgz", - "integrity": "sha512-abT/iLHD3sGZwqMTX1TYCMEulr+wBd0SzyOQnjYNLp7sngdOHYtNkMRI5v3w5thoN+BWtlHVDx2Osvq6fxhZWw==" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.4.tgz", + "integrity": "sha512-p4PvN1N+7GL3Y/NI9Ug1TKwowUV6h664kmxL79ctp1HRYCk1mhP0+SXhjRsoWXCdnJfbLLLmpV99rt8dMrHrzg==" }, "node_modules/@types/d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-HKuicPHJuvPgCD+np6Se9MQvS6OCbJmOjGvylzMJRlDwUXjKTTXs6Pwgk79O09Vj/ho3u1ofXnhFOaEWWPrlwA==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.1.tgz", + "integrity": "sha512-CSAVrHAtM9wfuLJ2tpvvwCU/F22sm7rMHNN+yh9D6O6hyAms3+O0cgMpC1pm6UEUMOntuZC8bMt74PteiDUdCg==" }, "node_modules/@types/d3-contour": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.2.tgz", - "integrity": "sha512-k6/bGDoAGJZnZWaKzeB+9glgXCYGvh6YlluxzBREiVo8f/X2vpTEdgPy9DN7Z2i42PZOZ4JDhVdlTSTSkLDPlQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.4.tgz", + "integrity": "sha512-B0aeX8Xg3MNUglULxqDvlgY1SVXuN2xtEleYSAY0iMhl/SMVT7snzgAveejjwM3KaWuNXIoXEJ7dmXE8oPq/jA==", "dependencies": { "@types/d3-array": "*", "@types/geojson": "*" } }, "node_modules/@types/d3-delaunay": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.1.tgz", - "integrity": "sha512-tLxQ2sfT0p6sxdG75c6f/ekqxjyYR0+LwPrsO1mbC9YDBzPJhs2HbJJRrn8Ez1DBoHRo2yx7YEATI+8V1nGMnQ==" + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.2.tgz", + "integrity": "sha512-WplUJ/OHU7eITneDqNnzK+2pgR+WDzUHG6XAUVo+oWHPQq74VcgUdw8a4ODweaZzF56OVYK+x9GxCyuq6hSu1A==" }, 
"node_modules/@types/d3-dispatch": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.2.tgz", - "integrity": "sha512-rxN6sHUXEZYCKV05MEh4z4WpPSqIw+aP7n9ZN6WYAAvZoEAghEK1WeVZMZcHRBwyaKflU43PCUAJNjFxCzPDjg==" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.4.tgz", + "integrity": "sha512-NApHpGHRNxUy7e2Lfzl/cwOucmn4Xdx6FdmXzAoomo8T81LyGmlBjjko/vP0TVzawlvEFLDq8OCRLulW6DDzKw==" }, "node_modules/@types/d3-drag": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.2.tgz", - "integrity": "sha512-qmODKEDvyKWVHcWWCOVcuVcOwikLVsyc4q4EBJMREsoQnR2Qoc2cZQUyFUPgO9q4S3qdSqJKBsuefv+h0Qy+tw==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.4.tgz", + "integrity": "sha512-/t53K1erTuUbP7WIX9SE0hlmytpTYRbIthlhbGkBHzCV5vPO++7yrk8OlisWPyIJO5TGowTmqCtGH2tokY5T/g==", "dependencies": { "@types/d3-selection": "*" } }, "node_modules/@types/d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-76pBHCMTvPLt44wFOieouXcGXWOF0AJCceUvaFkxSZEu4VDUdv93JfpMa6VGNFs01FHfuP4a5Ou68eRG1KBfTw==" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.4.tgz", + "integrity": "sha512-YxfUVJ55HxR8oq88136w09mBMPNhgH7PZjteq72onWXWOohGif/cLQnQv8V4A5lEGjXF04LhwSTpmzpY9wyVyA==" }, "node_modules/@types/d3-ease": { "version": "3.0.0", @@ -3412,40 +3574,40 @@ "integrity": "sha512-aMo4eaAOijJjA6uU+GIeW018dvy9+oH5Y2VPPzjjfxevvGQ/oRDs+tfYC9b50Q4BygRR8yE2QCLsrT0WtAVseA==" }, "node_modules/@types/d3-fetch": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.2.tgz", - "integrity": "sha512-gllwYWozWfbep16N9fByNBDTkJW/SyhH6SGRlXloR7WdtAaBui4plTP+gbUgiEot7vGw/ZZop1yDZlgXXSuzjA==", + "version": "3.0.4", + "resolved": 
"https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.4.tgz", + "integrity": "sha512-RleYajubALkGjrvatxWhlygfvB1KNF0Uzz9guRUeeA+M/2B7l8rxObYdktaX9zU1st04lMCHjZWe4vbl+msH2Q==", "dependencies": { "@types/d3-dsv": "*" } }, "node_modules/@types/d3-force": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.4.tgz", - "integrity": "sha512-q7xbVLrWcXvSBBEoadowIUJ7sRpS1yvgMWnzHJggFy5cUZBq2HZL5k/pBSm0GdYWS1vs5/EDwMjSKF55PDY4Aw==" + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.6.tgz", + "integrity": "sha512-G9wbOvCxkNlLrppoHLZ6oFpbm3z7ibfkXwLD8g5/4Aa7iTEV0Z7TQ0OL8UxAtvdOhCa2VZcSuqn1NQqyCEqmiw==" }, "node_modules/@types/d3-format": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.1.tgz", - "integrity": "sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg==" + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.2.tgz", + "integrity": "sha512-9oQWvKk2qVBo49FQq8yD/et8Lx0W5Ac2FdGSOUecqOFKqh0wkpyHqf9Qc7A06ftTR+Lz13Pi3jHIQis0aCueOA==" }, "node_modules/@types/d3-geo": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.0.3.tgz", - "integrity": "sha512-bK9uZJS3vuDCNeeXQ4z3u0E7OeJZXjUgzFdSOtNtMCJCLvDtWDwfpRVWlyt3y8EvRzI0ccOu9xlMVirawolSCw==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.0.5.tgz", + "integrity": "sha512-ysEEU93Wv9p2UZBxTK3kUP7veHgyhTA0qYtI7bxK5EMXb3JxGv0D4IH54PxprAF26n+uHci24McVmzwIdLgvgQ==", "dependencies": { "@types/geojson": "*" } }, "node_modules/@types/d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-9hjRTVoZjRFR6xo8igAJyNXQyPX6Aq++Nhb5ebrUF414dv4jr2MitM2fWiOY475wa3Za7TOS2Gh9fmqEhLTt0A==" + "version": "3.1.4", + "resolved": 
"https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.4.tgz", + "integrity": "sha512-wrvjpRFdmEu6yAqgjGy8MSud9ggxJj+I9XLuztLeSf/E0j0j6RQYtxH2J8U0Cfbgiw9ZDHyhpmaVuWhxscYaAQ==" }, "node_modules/@types/d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.2.tgz", + "integrity": "sha512-zAbCj9lTqW9J9PlF4FwnvEjXZUy75NQqPm7DMHZXuxCFTpuTrdK2NMYGQekf4hlasL78fCYOLu4EE3/tXElwow==", "dependencies": { "@types/d3-color": "*" } @@ -3461,9 +3623,9 @@ "integrity": "sha512-D49z4DyzTKXM0sGKVqiTDTYr+DHg/uxsiWDAkNrwXYuiZVd9o9wXZIo+YsHkifOiyBkmSWlEngHCQme54/hnHw==" }, "node_modules/@types/d3-quadtree": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.2.tgz", - "integrity": "sha512-QNcK8Jguvc8lU+4OfeNx+qnVy7c0VrDJ+CCVFS9srBo2GL9Y18CnIxBdTF3v38flrGy5s1YggcoAiu6s4fLQIw==" + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.3.tgz", + "integrity": "sha512-GDWaR+rGEk4ToLQSGugYnoh9AYYblsg/8kmdpa1KAJMwcdZ0v8rwgnldURxI5UrzxPlCPzF7by/Tjmv+Jn21Dg==" }, "node_modules/@types/d3-random": { "version": "3.0.1", @@ -3471,9 +3633,9 @@ "integrity": "sha512-IIE6YTekGczpLYo/HehAy3JGF1ty7+usI97LqraNa8IiDur+L44d0VOjAvFQWJVdZOJHukUJw+ZdZBlgeUsHOQ==" }, "node_modules/@types/d3-scale": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.3.tgz", - "integrity": "sha512-PATBiMCpvHJSMtZAMEhc2WyL+hnzarKzI6wAHYjhsonjWJYGq5BXTzQjv4l8m2jO183/4wZ90rKvSeT7o72xNQ==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.5.tgz", + "integrity": 
"sha512-w/C++3W394MHzcLKO2kdsIn5KKNTOqeQVzyPSGPLzQbkPw/jpeaGtSRlakcKevGgGsjJxGsbqS0fPrVFDbHrDA==", "dependencies": { "@types/d3-time": "*" } @@ -3484,27 +3646,27 @@ "integrity": "sha512-dsoJGEIShosKVRBZB0Vo3C8nqSDqVGujJU6tPznsBJxNJNwMF8utmS83nvCBKQYPpjCzaaHcrf66iTRpZosLPw==" }, "node_modules/@types/d3-selection": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.5.tgz", - "integrity": "sha512-xCB0z3Hi8eFIqyja3vW8iV01+OHGYR2di/+e+AiOcXIOrY82lcvWW8Ke1DYE/EUVMsBl4Db9RppSBS3X1U6J0w==" + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.7.tgz", + "integrity": "sha512-qoj2O7KjfqCobmtFOth8FMvjwMVPUAAmn6xiUbLl1ld7vQCPgffvyV5BBcEFfqWdilAUm+3zciU/3P3vZrUMlg==" }, "node_modules/@types/d3-shape": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.1.tgz", - "integrity": "sha512-6Uh86YFF7LGg4PQkuO2oG6EMBRLuW9cbavUW46zkIO5kuS2PfTqo2o9SkgtQzguBHbLgNnU90UNsITpsX1My+A==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.3.tgz", + "integrity": "sha512-cHMdIq+rhF5IVwAV7t61pcEXfEHsEsrbBUPkFGBwTXuxtTAkBBrnrNA8++6OWm3jwVsXoZYQM8NEekg6CPJ3zw==", "dependencies": { "@types/d3-path": "*" } }, "node_modules/@types/d3-time": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.0.tgz", - "integrity": "sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.1.tgz", + "integrity": "sha512-5j/AnefKAhCw4HpITmLDTPlf4vhi8o/dES+zbegfPb7LaGfNyqkLxBR6E+4yvTAgnJLmhe80EXFMzUs38fw4oA==" }, "node_modules/@types/d3-time-format": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.0.tgz", - "integrity": 
"sha512-yjfBUe6DJBsDin2BMIulhSHmr5qNR5Pxs17+oW4DoVPyVIXZ+m6bs7j1UVKP08Emv6jRmYrYqxYzO63mQxy1rw==" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.1.tgz", + "integrity": "sha512-Br6EFeu9B1Zrem7KaYbr800xCmEDyq8uE60kEU8rWhC/XpFYX6ocGMZuRJDQfFCq6SyakQxNHFqIfJbFLf4x6Q==" }, "node_modules/@types/d3-timer": { "version": "3.0.0", @@ -3512,61 +3674,61 @@ "integrity": "sha512-HNB/9GHqu7Fo8AQiugyJbv6ZxYz58wef0esl4Mv828w1ZKpAshw/uFWVDUcIB9KKFeFKoxS3cHY07FFgtTRZ1g==" }, "node_modules/@types/d3-transition": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.3.tgz", - "integrity": "sha512-/S90Od8Id1wgQNvIA8iFv9jRhCiZcGhPd2qX0bKF/PS+y0W5CrXKgIiELd2CvG1mlQrWK/qlYh3VxicqG1ZvgA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.5.tgz", + "integrity": "sha512-dcfjP6prFxj3ziFOJrnt4W2P0oXNj/sGxsJXH8286sHtVZ4qWGbjuZj+RRCYx4YZ4C0izpeE8OqXVCtoWEtzYg==", "dependencies": { "@types/d3-selection": "*" } }, "node_modules/@types/d3-zoom": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.3.tgz", - "integrity": "sha512-OWk1yYIIWcZ07+igN6BeoG6rqhnJ/pYe+R1qWFM2DtW49zsoSjgb9G5xB0ZXA8hh2jAzey1XuRmMSoXdKw8MDA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.5.tgz", + "integrity": "sha512-mIefdTLtxuWUWTbBupCUXPAXVPmi8/Uwrq41gQpRh0rD25GMU1ku+oTELqNY2NuuiI0F3wXC5e1liBQi7YS7XQ==", "dependencies": { "@types/d3-interpolate": "*", "@types/d3-selection": "*" } }, "node_modules/@types/debug": { - "version": "4.1.8", - "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.8.tgz", - "integrity": "sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==", + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.9.tgz", + "integrity": 
"sha512-8Hz50m2eoS56ldRlepxSBa6PWEVCtzUo/92HgLc2qTMnotJNIm7xP+UZhyWoYsyOdd5dxZ+NZLb24rsKyFs2ow==", "dependencies": { "@types/ms": "*" } }, "node_modules/@types/estree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz", - "integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.2.tgz", + "integrity": "sha512-VeiPZ9MMwXjO32/Xu7+OwflfmeoRwkE/qzndw42gGtgJwZopBnzy2gD//NN1+go1mADzkDcqf/KnFRSjTJ8xJA==" }, "node_modules/@types/geojson": { - "version": "7946.0.10", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.10.tgz", - "integrity": "sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==" + "version": "7946.0.11", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.11.tgz", + "integrity": "sha512-L7A0AINMXQpVwxHJ4jxD6/XjZ4NDufaRlUJHjNIFKYUFBH1SvOW+neaqb0VTRSLW5suSrSu19ObFEFnfNcr+qg==" }, "node_modules/@types/hast": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.5.tgz", - "integrity": "sha512-SvQi0L/lNpThgPoleH53cdjB3y9zpLlVjRbqB3rH8hx1jiRSBGAhyjV3H+URFjNVRqt2EdYNrbZE5IsGlNfpRg==", + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.6.tgz", + "integrity": "sha512-47rJE80oqPmFdVDCD7IheXBrVdwuBgsYwoczFvKmwfo2Mzsnt+V9OONsYauFmICb6lQPpCuXYJWejBNs4pDJRg==", "dependencies": { "@types/unist": "^2" } }, "node_modules/@types/hoist-non-react-statics": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", - "integrity": "sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", 
+ "integrity": "sha512-YIQtIg4PKr7ZyqNPZObpxfHsHEmuB8dXCxd6qVcGuQVDK2bpsF7bYNnBJ4Nn7giuACZg+WewExgrtAJ3XnA4Xw==", "dependencies": { "@types/react": "*", "hoist-non-react-statics": "^3.3.0" } }, "node_modules/@types/http-cache-semantics": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", - "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.2.tgz", + "integrity": "sha512-FD+nQWA2zJjh4L9+pFXqWOi0Hs1ryBCfI+985NjluQ1p8EYtoLvjLOKidXBtZ4/IcxDX4o8/E8qDS3540tNliw==", "dev": true }, "node_modules/@types/jest": { @@ -3580,9 +3742,9 @@ } }, "node_modules/@types/katex": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.2.tgz", - "integrity": "sha512-dHsSjSlU/EWEEbeNADr3FtZZOAXPkFPUO457QCnoNqcZQXNqNEu/svQd0Nritvd3wNff4vvC/f4e6xgX3Llt8A==" + "version": "0.16.3", + "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.3.tgz", + "integrity": "sha512-CeVMX9EhVUW8MWnei05eIRks4D5Wscw/W9Byz1s3PA+yJvcdvq9SaDjiUKvRvEgjpdTyJMjQA43ae4KTwsvOPg==" }, "node_modules/@types/keyv": { "version": "3.1.4", @@ -3594,9 +3756,9 @@ } }, "node_modules/@types/lodash": { - "version": "4.14.197", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.197.tgz", - "integrity": "sha512-BMVOiWs0uNxHVlHBgzTIqJYmj+PgCo4euloGF+5m4okL3rEYzM2EEv78mw8zWSMM57dM7kVIgJ2QDvwHSoCI5g==", + "version": "4.14.199", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.199.tgz", + "integrity": "sha512-Vrjz5N5Ia4SEzWWgIVwnHNEnb1UE1XMkvY5DGXrAeOGE9imk0hgTHh5GyDjLDJi9OTCn9oo9dXH1uToK1VRfrg==", "dev": true }, "node_modules/@types/mathjax": { @@ -3618,9 +3780,9 @@ "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==" }, 
"node_modules/@types/node": { - "version": "16.18.40", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.40.tgz", - "integrity": "sha512-+yno3ItTEwGxXiS/75Q/aHaa5srkpnJaH+kdkTVJ3DtJEwv92itpKbxU+FjPoh2m/5G9zmUQfrL4A4C13c+iGA==", + "version": "16.18.54", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.54.tgz", + "integrity": "sha512-oTmGy68gxZZ21FhTJVVvZBYpQHEBZxHKTsGshobMqm9qWpbqdZsA5jvsuPZcHu0KwpmLrOHWPdEfg7XDpNT9UA==", "devOptional": true }, "node_modules/@types/parse-json": { @@ -3629,14 +3791,14 @@ "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, "node_modules/@types/prop-types": { - "version": "15.7.5", - "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" + "version": "15.7.7", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.7.tgz", + "integrity": "sha512-FbtmBWCcSa2J4zL781Zf1p5YUBXQomPEcep9QZCfRfQgTxz3pJWiDFLebohZ9fFntX5ibzOkSsrJ0TEew8cAog==" }, "node_modules/@types/react": { - "version": "18.2.20", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.20.tgz", - "integrity": "sha512-WKNtmsLWJM/3D5mG4U84cysVY31ivmyw85dE84fOCk5Hx78wezB/XEjVPWl2JTZ5FkEeaTJf+VgUAUn3PE7Isw==", + "version": "18.2.22", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.22.tgz", + "integrity": "sha512-60fLTOLqzarLED2O3UQImc/lsNRgG0jE/a1mPW9KjMemY0LMITWEsbS4VvZ4p6rorEHd5YKxxmMKSDK505GHpA==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -3652,14 +3814,6 @@ "@types/react": "*" } }, - "node_modules/@types/react-is": { - "version": "18.2.1", - "resolved": "https://registry.npmjs.org/@types/react-is/-/react-is-18.2.1.tgz", - "integrity": "sha512-wyUkmaaSZEzFZivD8F2ftSyAfk6L+DfFliVj/mYdOXbVjRcS87fQJLTnhk6dRZPuJjI+9g6RZJO4PNCngUrmyw==", - "dependencies": { - 
"@types/react": "*" - } - }, "node_modules/@types/react-transition-group": { "version": "4.4.6", "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.6.tgz", @@ -3678,9 +3832,9 @@ } }, "node_modules/@types/scheduler": { - "version": "0.16.3", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz", - "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==" + "version": "0.16.4", + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.4.tgz", + "integrity": "sha512-2L9ifAGl7wmXwP4v3pN4p2FLhD0O1qsJpvKmNin5VA8+UvNVb447UDaAEV6UdrkA+m/Xs58U1RFps44x6TFsVQ==" }, "node_modules/@types/testing-library__jest-dom": { "version": "5.14.9", @@ -3692,23 +3846,23 @@ } }, "node_modules/@types/unist": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.7.tgz", - "integrity": "sha512-cputDpIbFgLUaGQn6Vqg3/YsJwxUwHLO13v3i5ouxT4lat0khip9AEWxtERujXV9wxIB1EyF97BSJFt6vpdI8g==" + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.8.tgz", + "integrity": "sha512-d0XxK3YTObnWVp6rZuev3c49+j4Lo8g4L1ZRm9z5L0xpoZycUPshHgczK5gsUMaZOstjVYYi09p5gYvUtfChYw==" }, "node_modules/@types/uuid": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz", - "integrity": "sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.4.tgz", + "integrity": "sha512-zAuJWQflfx6dYJM62vna+Sn5aeSWhh3OB+wfUEACNcqUSc0AGc5JKl+ycL1vrH7frGTXhJchYjE1Hak8L819dA==", "dev": true }, "node_modules/@vitejs/plugin-react-swc": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.3.2.tgz", - "integrity": 
"sha512-VJFWY5sfoZerQRvJrh518h3AcQt6f/yTuWn4/TRB+dqmYU0NX1qz7qM5Wfd+gOQqUzQW4gxKqKN3KpE/P3+zrA==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.4.0.tgz", + "integrity": "sha512-m7UaA4Uvz82N/0EOVpZL4XsFIakRqrFKeSNxa1FBLSXGvWrWRBwmZb4qxk+ZIVAZcW3c3dn5YosomDgx62XWcQ==", "dev": true, "dependencies": { - "@swc/core": "^1.3.61" + "@swc/core": "^1.3.85" }, "peerDependencies": { "vite": "^4" @@ -3725,9 +3879,9 @@ "integrity": "sha512-jbQfFaw+57OBwPt7qSNHuW+RA8smmRwkWRS1Ozh6K/QxUspBgBV/LpdSzlY7vee8TomS6j3D33B9rIeH1qMwsA==" }, "node_modules/ace-builds": { - "version": "1.24.1", - "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.24.1.tgz", - "integrity": "sha512-TLcxMxiTRX5Eq9bBVSd/bTJlanCBULiv/IULLohJDDaCAfcpZKJBVSd4OWfN/j2c2jCLc+jhpNWGELiJZw3wPw==" + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.28.0.tgz", + "integrity": "sha512-wkJp+Wz8MRHtCVdt65L/jPFLAQ0iqJZ2EeD2XWOvKGbIi4mZNwHlpHRLRB8ZnQ07VoiB0TLFWwIjjm2FL9gUcQ==" }, "node_modules/acorn": { "version": "8.10.0", @@ -3918,9 +4072,9 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "node_modules/autoprefixer": { - "version": "10.4.15", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.15.tgz", - "integrity": "sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew==", + "version": "10.4.16", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", + "integrity": "sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==", "dev": true, "funding": [ { @@ -3938,8 +4092,8 @@ ], "dependencies": { "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001520", - "fraction.js": "^4.2.0", + "caniuse-lite": "^1.0.30001538", + "fraction.js": "^4.3.6", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", 
"postcss-value-parser": "^4.2.0" @@ -3967,9 +4121,9 @@ } }, "node_modules/axios": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz", - "integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.5.0.tgz", + "integrity": "sha512-D4DdjDo5CY50Qms0qGQTTw6Q44jl7zRwY7bthds06pUGfChBCTcQs+N743eFWGEd6pRTMd6A+I87aWyFV5wiZQ==", "dependencies": { "follow-redirects": "^1.15.0", "form-data": "^4.0.0", @@ -4226,9 +4380,9 @@ } }, "node_modules/browserslist": { - "version": "4.21.10", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz", - "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==", + "version": "4.21.11", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.11.tgz", + "integrity": "sha512-xn1UXOKUz7DjdGlg9RrUr0GGiWzI97UQJnugHtH0OLDfJB7jMgoIkYvRIEO1l9EeEERVqeqLYOcFBW9ldjypbQ==", "funding": [ { "type": "opencollective", @@ -4244,10 +4398,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", + "caniuse-lite": "^1.0.30001538", + "electron-to-chromium": "^1.4.526", "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" + "update-browserslist-db": "^1.0.13" }, "bin": { "browserslist": "cli.js" @@ -4362,9 +4516,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001521", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001521.tgz", - "integrity": "sha512-fnx1grfpEOvDGH+V17eccmNjucGUnCbP6KL+l5KqBIerp26WK/+RQ7CIDE37KGJjaPyqWXXlFUyKiWmvdNNKmQ==", + "version": "1.0.30001539", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001539.tgz", + "integrity": "sha512-hfS5tE8bnNiNvEOEkm8HElUHroYwlqMMENEzELymy77+tJ6m+gA2krtHl5hxJaj71OlpC2cHZbdSMX1/YEqEkA==", "funding": [ { "type": 
"opencollective", @@ -4494,9 +4648,9 @@ } }, "node_modules/cli-spinners": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.0.tgz", - "integrity": "sha512-4/aL9X3Wh0yiMQlE+eeRhWP6vclO3QRtw1JHKIT0FFUs5FjpFmESqtMvYZ0+lbzBw900b95mS0hohy+qn2VK/g==", + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.1.tgz", + "integrity": "sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ==", "engines": { "node": ">=6" }, @@ -4799,9 +4953,9 @@ } }, "node_modules/daisyui": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-3.5.1.tgz", - "integrity": "sha512-7GG+9QXnr2qQMCqnyFU8TxpaOYJigXiEtmzoivmiiZZHvxqIwYdaMAkgivqTVxEgy3Hot3m1suzZjmt1zUrvmA==", + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-3.8.0.tgz", + "integrity": "sha512-VsWD//XlHkOBFSiRNTOZTxTJ/W8xI65erowErfbDWrPTkMYqf0ee/FTaqn4rquZoCcumENdFegAL8eELMnztxQ==", "dev": true, "dependencies": { "colord": "^2.9", @@ -4957,12 +5111,27 @@ "node": ">=10" } }, - "node_modules/define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "node_modules/define-data-property": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.0.tgz", + "integrity": "sha512-UzGwzcjyv3OtAvolTj1GoyNYzfFR+iqbGjcnBEENZVCpM4/Ng1yhGNvS3lR/xDS74Tb2wGG9WzNSNIOS9UVb2g==", "dev": true, "dependencies": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": 
"sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" }, @@ -5057,10 +5226,19 @@ "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.5.tgz", "integrity": "sha512-F9e6wPGtY+8KNMRAVfxeCOHU0/NPWMSENNq4pQctuXRqqdEPW7q3CrLbR5Nse044WwacyjHGOMlvNsBe1y6z9A==" }, + "node_modules/dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "dependencies": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, "node_modules/electron-to-chromium": { - "version": "1.4.495", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.495.tgz", - "integrity": "sha512-mwknuemBZnoOCths4GtpU/SDuVMp3uQHKa2UNJT9/aVD6WVRjGpXOxRGX7lm6ILIenTdGXPSTCTDaWos5tEU8Q==" + "version": "1.4.529", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.529.tgz", + "integrity": "sha512-6uyPyXTo8lkv8SWAmjKFbG42U073TXlzD4R8rW3EzuznhFS2olCIAfjjQtV2dV2ar/vRF55KUd3zQYnCB0dd3A==" }, "node_modules/emoji-regex": { "version": "8.0.0", @@ -5443,9 +5621,9 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", + "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", "funding": [ { "type": "individual", @@ -5503,16 +5681,16 @@ } }, "node_modules/fraction.js": { - "version": "4.2.0", - "resolved": 
"https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", - "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.6.tgz", + "integrity": "sha512-n2aZ9tNfYDwaHhvFTkhFErqOMIb8uyzSQ+vGJBjZyanAKZVbGUQ1sngfk9FdkBw7G26O7AgNjLcecLffD1c7eg==", "dev": true, "engines": { "node": "*" }, "funding": { "type": "patreon", - "url": "https://www.patreon.com/infusion" + "url": "https://github.com/sponsors/rawify" } }, "node_modules/fs-extra": { @@ -6506,9 +6684,9 @@ } }, "node_modules/jiti": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.19.1.tgz", - "integrity": "sha512-oVhqoRDaBXf7sjkll95LHVS6Myyyb1zaunVwk4Z0+WPSW4gjS0pl01zYKHScTuyEhQsFxV5L4DR5r+YqSyqyyg==", + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.20.0.tgz", + "integrity": "sha512-3TV69ZbrvV6U5DfQimop50jE9Dl6J8O1ja1dvBbMba/sZ3YBEQqJ2VZRoQPVnhlzjNtU1vaXRZVrVjU4qtm8yA==", "bin": { "jiti": "bin/jiti.js" } @@ -6759,6 +6937,14 @@ "loose-envify": "cli.js" } }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dependencies": { + "tslib": "^2.0.3" + } + }, "node_modules/lowercase-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", @@ -7730,6 +7916,14 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", + "engines": { + "node": "*" + } + }, "node_modules/mri": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -7770,6 +7964,15 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "dependencies": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, "node_modules/node-domexception": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", @@ -8156,10 +8359,40 @@ "node": ">= 6" } }, + "node_modules/playwright": { + "version": "1.38.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.38.1.tgz", + "integrity": "sha512-oRMSJmZrOu1FP5iu3UrCx8JEFRIMxLDM0c/3o4bpzU5Tz97BypefWf7TuTNPWeCe279TPal5RtPPZ+9lW/Qkow==", + "dev": true, + "dependencies": { + "playwright-core": "1.38.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.38.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.38.1.tgz", + "integrity": "sha512-tQqNFUKa3OfMf4b2jQ7aGLB8o9bS3bOY0yMEtldtC2+spf8QXG9zvXLTXUeRsoNuxEYMgLYR+NXfAa1rjKRcrg==", + "dev": true, + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, "node_modules/postcss": { - "version": "8.4.28", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.28.tgz", - "integrity": "sha512-Z7V5j0cq8oEKyejIKfpD8b4eBy9cwW2JWPk0+fB1HOAMsfHbnAXLLS+PfVWlzMSLQaWttKDt607I0XHmpE67Vw==", + "version": "8.4.30", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.30.tgz", + "integrity": "sha512-7ZEao1g4kd68l97aWG/etQKPKq07us0ieSZ2TnFDk11i0ZfDW2AwKHYU8qv4MZKqN2fdBfg+7q0ES06UA73C1g==", "funding": [ { "type": "opencollective", @@ -8246,9 +8479,9 @@ } }, 
"node_modules/postcss-load-config/node_modules/yaml": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", - "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.2.tgz", + "integrity": "sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==", "engines": { "node": ">= 14" } @@ -8478,9 +8711,9 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/property-information": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.2.0.tgz", - "integrity": "sha512-kma4U7AFCTwpqq5twzC1YVIDXSqg6qQK6JN0smOw8fgRy1OkMi0CYSzFmsy6dnqSenamAtj0CyXMUJ1Mf6oROg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.3.0.tgz", + "integrity": "sha512-gVNZ74nqhRMiIUYWGQdosYetaKc83x8oT41a0LlV3AAFCAZwCpg4vmGkq8t34+cUhp3cnM4XDiU/7xlgK7HGrg==", "funding": { "type": "github", "url": "https://github.com/sponsors/wooorm" @@ -8620,9 +8853,9 @@ } }, "node_modules/react-icons": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.10.1.tgz", - "integrity": "sha512-/ngzDP/77tlCfqthiiGNZeYFACw85fUjZtLbedmJ5DTlNDIwETxhwBzdOJ21zj4iJdvc0J3y7yOsX3PpxAJzrw==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.11.0.tgz", + "integrity": "sha512-V+4khzYcE5EBk/BvcuYRq6V/osf11ODUM2J8hg2FDSswRrGvqiYUYPRy4OdrWaQOBj4NcpJfmHZLNaD+VH0TyA==", "peerDependencies": { "react": "*" } @@ -8720,11 +8953,11 @@ } }, "node_modules/react-router": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.15.0.tgz", - "integrity": 
"sha512-NIytlzvzLwJkCQj2HLefmeakxxWHWAP+02EGqWEZy+DgfHHKQMUoBBjUQLOtFInBMhWtb3hiUy6MfFgwLjXhqg==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.16.0.tgz", + "integrity": "sha512-VT4Mmc4jj5YyjpOi5jOf0I+TYzGpvzERy4ckNSvSh2RArv8LLoCxlsZ2D+tc7zgjxcY34oTz2hZaeX5RVprKqA==", "dependencies": { - "@remix-run/router": "1.8.0" + "@remix-run/router": "1.9.0" }, "engines": { "node": ">=14.0.0" @@ -8734,12 +8967,12 @@ } }, "node_modules/react-router-dom": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.15.0.tgz", - "integrity": "sha512-aR42t0fs7brintwBGAv2+mGlCtgtFQeOzK0BM1/OiqEzRejOZtpMZepvgkscpMUnKb8YO84G7s3LsHnnDNonbQ==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.16.0.tgz", + "integrity": "sha512-aTfBLv3mk/gaKLxgRDUPbPw+s4Y/O+ma3rEN1u8EgEpLpPe6gNjIsWt9rxushMHHMb7mSwxRGdGlGdvmFsyPIg==", "dependencies": { - "@remix-run/router": "1.8.0", - "react-router": "6.15.0" + "@remix-run/router": "1.9.0", + "react-router": "6.16.0" }, "engines": { "node": ">=14.0.0" @@ -8807,9 +9040,9 @@ } }, "node_modules/react-tooltip": { - "version": "5.21.1", - "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.21.1.tgz", - "integrity": "sha512-wJqF/yzK1wuJuy5/zAkVErFA609fVv1ZukhGjw44PcMvg9wL0jomnpQyz3qH1H7TWjz/wqO/OMc3ipQNjZ8zYg==", + "version": "5.21.4", + "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.21.4.tgz", + "integrity": "sha512-LZsllEbiu63zNwuCalq3gIFcBu2Xf0I0fMg7uuF7/5ROo5//uHe8Sum7v9L1Rtp6IozcoU9YAjkNUZdrxutsNg==", "dependencies": { "@floating-ui/dom": "^1.0.0", "classnames": "^2.3.0" @@ -8834,17 +9067,25 @@ "react-dom": ">=16.6.0" } }, + "node_modules/react18-json-view": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/react18-json-view/-/react18-json-view-0.2.5.tgz", + "integrity": 
"sha512-BiCWyRUCVbnaK4kfNay8crOXZnWsZ6XsnY3fwOf5C+ZaY9w9FSTawo2p+h2UG/KcDP8meZuGlkP95klfFG9GfQ==", + "peerDependencies": { + "react": ">=16.8.0" + } + }, "node_modules/reactflow": { - "version": "11.8.2", - "resolved": "https://registry.npmjs.org/reactflow/-/reactflow-11.8.2.tgz", - "integrity": "sha512-qqjg4x1yFtyzAu2ldyadRhoTzQjFRnuS+iPUrxaaPaBqGfuwkjm3KnFUrbbgincpOaRVMlIuTuCu5sIRQPcYBQ==", + "version": "11.9.1", + "resolved": "https://registry.npmjs.org/reactflow/-/reactflow-11.9.1.tgz", + "integrity": "sha512-lKWWWFne/o8/SShIeXXP4J/15384L0KDohH0It83gNKFgF6Yo7/EJE9p6k3CnYnOfyYETFRYb1KNOY04cSmtVA==", "dependencies": { - "@reactflow/background": "11.2.7", - "@reactflow/controls": "11.1.18", - "@reactflow/core": "11.8.2", - "@reactflow/minimap": "11.6.2", - "@reactflow/node-resizer": "2.1.4", - "@reactflow/node-toolbar": "1.2.6" + "@reactflow/background": "11.3.1", + "@reactflow/controls": "11.2.1", + "@reactflow/core": "11.9.1", + "@reactflow/minimap": "11.7.1", + "@reactflow/node-resizer": "2.2.1", + "@reactflow/node-toolbar": "1.3.1" }, "peerDependencies": { "react": ">=17", @@ -8940,14 +9181,14 @@ "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==" }, "node_modules/regexp.prototype.flags": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz", - "integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", + "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", "dev": true, "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" + "set-function-name": "^2.0.0" }, "engines": { "node": ">= 0.4" @@ -9048,9 +9289,9 @@ "integrity": 
"sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "node_modules/resolve": { - "version": "1.22.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz", - "integrity": "sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==", + "version": "1.22.6", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.6.tgz", + "integrity": "sha512-njhxM7mV12JfufShqGy3Rz8j11RPdLy4xi15UurGJeoHLfJpVXKdh3ueuOqbYUcDZnffr6X739JBo5LzyahEsw==", "dependencies": { "is-core-module": "^2.13.0", "path-parse": "^1.0.7", @@ -9114,9 +9355,9 @@ } }, "node_modules/rollup": { - "version": "3.28.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.28.0.tgz", - "integrity": "sha512-d7zhvo1OUY2SXSM6pfNjgD5+d0Nz87CUp4mt8l/GgVP3oBsPwzNvSzyu1me6BSG9JIgWNTVcafIXBIyM8yQ3yw==", + "version": "3.29.3", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.3.tgz", + "integrity": "sha512-T7du6Hum8jOkSWetjRgbwpM6Sy0nECYrYRSmZjayFcOddtKJWU4d17AC3HNUk7HRuqy4p+G7aEZclSHytqUmEg==", "bin": { "rollup": "dist/bin/rollup" }, @@ -9264,6 +9505,20 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true }, + "node_modules/set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/shadcn-ui": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/shadcn-ui/-/shadcn-ui-0.2.3.tgz", @@ -9319,13 +9574,13 @@ } }, "node_modules/shadcn-ui/node_modules/cosmiconfig": { - "version": "8.2.0", - "resolved": 
"https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.2.0.tgz", - "integrity": "sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==", + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "dependencies": { - "import-fresh": "^3.2.1", + "import-fresh": "^3.3.0", "js-yaml": "^4.1.0", - "parse-json": "^5.0.0", + "parse-json": "^5.2.0", "path-type": "^4.0.0" }, "engines": { @@ -9333,6 +9588,14 @@ }, "funding": { "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, "node_modules/shadcn-ui/node_modules/cross-spawn": { @@ -9593,6 +9856,15 @@ "url": "https://github.com/chalk/slice-ansi?sponsor=1" } }, + "node_modules/snake-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", + "integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==", + "dependencies": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, "node_modules/sort-keys": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", @@ -9965,9 +10237,9 @@ } }, "node_modules/tailwindcss-animate": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/tailwindcss-animate/-/tailwindcss-animate-1.0.6.tgz", - "integrity": "sha512-4WigSGMvbl3gCCact62ZvOngA+PRqhAn7si3TQ3/ZuPuQZcIEtVap+ENSXbzWhpojKB8CpvnIsrwBu8/RnHtuw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/tailwindcss-animate/-/tailwindcss-animate-1.0.7.tgz", + "integrity": "sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==", "peerDependencies": { "tailwindcss": ">=3.0.0 || insiders" } @@ -10158,15 +10430,15 @@ } }, 
"node_modules/tslib": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", - "integrity": "sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==" + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, "node_modules/typescript": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", - "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", - "dev": true, + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", + "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "devOptional": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -10307,9 +10579,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.0.13", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", + "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==", "funding": [ { "type": "opencollective", @@ -10407,9 +10679,13 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "node_modules/uuid": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", - "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", + "version": "9.0.1", + "resolved": 
"https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], "bin": { "uuid": "dist/bin/uuid" } @@ -10522,13 +10798,13 @@ } }, "node_modules/vite-plugin-svgr": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-3.2.0.tgz", - "integrity": "sha512-Uvq6niTvhqJU6ga78qLKBFJSDvxWhOnyfQSoKpDPMAGxJPo5S3+9hyjExE5YDj6Lpa4uaLkGc1cBgxXov+LjSw==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-3.3.0.tgz", + "integrity": "sha512-vWZMCcGNdPqgziYFKQ3Y95XP0d0YGp28+MM3Dp9cTa/px5CKcHHrIoPl2Jw81rgVm6/ZUNONzjXbZQZ7Kw66og==", "dependencies": { - "@rollup/pluginutils": "^5.0.2", - "@svgr/core": "^7.0.0", - "@svgr/plugin-jsx": "^7.0.0" + "@rollup/pluginutils": "^5.0.4", + "@svgr/core": "^8.1.0", + "@svgr/plugin-jsx": "^8.1.0" }, "peerDependencies": { "vite": "^2.6.0 || 3 || 4" @@ -11053,9 +11329,9 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.14.2.tgz", + "integrity": "sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==", "engines": { "node": ">=10.0.0" }, @@ -11116,9 +11392,9 @@ } }, "node_modules/zod": { - "version": "3.22.1", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.1.tgz", - "integrity": "sha512-+qUhAMl414+Elh+fRNtpU+byrwjDFOS1N7NioLY+tSlcADTx4TkCUua/hxJvxwDXcV4397/nZ420jy4n4+3WUg==", + "version": "3.22.2", + "resolved": 
"https://registry.npmjs.org/zod/-/zod-3.22.2.tgz", + "integrity": "sha512-wvWkphh5WQsJbVk1tbx1l1Ly4yg+XecD+Mq280uBGt9wa5BKSWf4Mhp6GmrkPixhMxmabYY7RbzlwVP32pbGCg==", "funding": { "url": "https://github.com/sponsors/colinhacks" } @@ -11182,16 +11458,16 @@ } }, "@antfu/ni": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@antfu/ni/-/ni-0.21.5.tgz", - "integrity": "sha512-rFmuqZMFa1OTRbxdu3vmfytsy1CtsIUFH0bO85rZ1xdu2uLoioSaEi6iOULDVTQUrnes50jMs+UW355Ndj7Oxg==" + "version": "0.21.8", + "resolved": "https://registry.npmjs.org/@antfu/ni/-/ni-0.21.8.tgz", + "integrity": "sha512-90X8pU2szlvw0AJo9EZMbYc2eQKkmO7mAdC4tD4r5co2Mm56MT37MIG8EyB7p4WRheuzGxuLDxJ63mF6+Zajiw==" }, "@babel/code-frame": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.10.tgz", - "integrity": "sha512-/KKIMG4UEL35WmI9OlvMhurwtytjvXoFcGNrOvyG9zIzA8YmPjVtIZUf7b05+TPO7G7/GEmLHDaoCgACHl9hhA==", + "version": "7.22.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz", + "integrity": "sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w==", "requires": { - "@babel/highlight": "^7.22.10", + "@babel/highlight": "^7.22.13", "chalk": "^2.4.2" }, "dependencies": { @@ -11247,32 +11523,37 @@ } }, "@babel/compat-data": { - "version": "7.22.9", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.9.tgz", - "integrity": "sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ==" + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.20.tgz", + "integrity": "sha512-BQYjKbpXjoXwFW5jGqiizJQQT/aC7pFm9Ok1OWssonuguICi264lbgMzRp2ZMmRSlfkX6DsWDDcsrctK8Rwfiw==" }, "@babel/core": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.10.tgz", - "integrity": 
"sha512-fTmqbbUBAwCcre6zPzNngvsI0aNrPZe77AeqvDxWM9Nm+04RrJ3CAmGHA9f7lJQY6ZMhRztNemy4uslDxTX4Qw==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.0.tgz", + "integrity": "sha512-97z/ju/Jy1rZmDxybphrBuI+jtJjFVoz7Mr9yUQVVVi+DNZE333uFQeMOqcCIy1x3WYBIbWftUSLmbNXNT7qFQ==", "requires": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.22.10", - "@babel/generator": "^7.22.10", - "@babel/helper-compilation-targets": "^7.22.10", - "@babel/helper-module-transforms": "^7.22.9", - "@babel/helpers": "^7.22.10", - "@babel/parser": "^7.22.10", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.10", - "@babel/types": "^7.22.10", - "convert-source-map": "^1.7.0", + "@babel/code-frame": "^7.22.13", + "@babel/generator": "^7.23.0", + "@babel/helper-compilation-targets": "^7.22.15", + "@babel/helper-module-transforms": "^7.23.0", + "@babel/helpers": "^7.23.0", + "@babel/parser": "^7.23.0", + "@babel/template": "^7.22.15", + "@babel/traverse": "^7.23.0", + "@babel/types": "^7.23.0", + "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", - "json5": "^2.2.2", + "json5": "^2.2.3", "semver": "^6.3.1" }, "dependencies": { + "convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" + }, "semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -11281,23 +11562,23 @@ } }, "@babel/generator": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.10.tgz", - "integrity": "sha512-79KIf7YiWjjdZ81JnLujDRApWtl7BxTqWD88+FFdQEIOG8LJ0etDOM7CXuIgGJa55sGOwZVwuEsaLEm0PJ5/+A==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.23.0.tgz", + "integrity": 
"sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g==", "requires": { - "@babel/types": "^7.22.10", + "@babel/types": "^7.23.0", "@jridgewell/gen-mapping": "^0.3.2", "@jridgewell/trace-mapping": "^0.3.17", "jsesc": "^2.5.1" } }, "@babel/helper-compilation-targets": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.10.tgz", - "integrity": "sha512-JMSwHD4J7SLod0idLq5PKgI+6g/hLD/iuWBq08ZX49xE14VpVEojJ5rHWptpirV2j020MvypRLAXAO50igCJ5Q==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz", + "integrity": "sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw==", "requires": { "@babel/compat-data": "^7.22.9", - "@babel/helper-validator-option": "^7.22.5", + "@babel/helper-validator-option": "^7.22.15", "browserslist": "^4.21.9", "lru-cache": "^5.1.1", "semver": "^6.3.1" @@ -11324,17 +11605,17 @@ } }, "@babel/helper-environment-visitor": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", - "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==" + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz", + "integrity": "sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==" }, "@babel/helper-function-name": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", - "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", + "version": "7.23.0", + "resolved": 
"https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz", + "integrity": "sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==", "requires": { - "@babel/template": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/template": "^7.22.15", + "@babel/types": "^7.23.0" } }, "@babel/helper-hoist-variables": { @@ -11346,23 +11627,23 @@ } }, "@babel/helper-module-imports": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", - "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz", + "integrity": "sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==", "requires": { - "@babel/types": "^7.22.5" + "@babel/types": "^7.22.15" } }, "@babel/helper-module-transforms": { - "version": "7.22.9", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.9.tgz", - "integrity": "sha512-t+WA2Xn5K+rTeGtC8jCsdAH52bjggG5TKRuRrAGNM/mjIbO4GxvlLMFOEz9wXY5I2XQ60PMFsAG2WIcG82dQMQ==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.23.0.tgz", + "integrity": "sha512-WhDWw1tdrlT0gMgUJSlX0IQvoO1eN279zrAUbVB+KpV2c3Tylz8+GnKOLllCS6Z/iZQEyVYxhZVUdPTqs2YYPw==", "requires": { - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-module-imports": "^7.22.5", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-module-imports": "^7.22.15", "@babel/helper-simple-access": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/helper-validator-identifier": "^7.22.5" + "@babel/helper-validator-identifier": "^7.22.20" } }, "@babel/helper-simple-access": { @@ 
-11387,31 +11668,31 @@ "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==" }, "@babel/helper-validator-identifier": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", - "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==" + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", + "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==" }, "@babel/helper-validator-option": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", - "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==" + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.15.tgz", + "integrity": "sha512-bMn7RmyFjY/mdECUbgn9eoSY4vqvacUnS9i9vGAGttgFWesO6B4CYWA7XlpbWgBt71iv/hfbPlynohStqnu5hA==" }, "@babel/helpers": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.10.tgz", - "integrity": "sha512-a41J4NW8HyZa1I1vAndrraTlPZ/eZoga2ZgS7fEr0tZJGVU4xqdE80CEm0CcNjha5EZ8fTBYLKHF0kqDUuAwQw==", + "version": "7.23.1", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.23.1.tgz", + "integrity": "sha512-chNpneuK18yW5Oxsr+t553UZzzAs3aZnFm4bxhebsNTeshrC95yA7l5yl7GBAG+JG1rF0F7zzD2EixK9mWSDoA==", "requires": { - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.10", - "@babel/types": "^7.22.10" + "@babel/template": "^7.22.15", + "@babel/traverse": "^7.23.0", + "@babel/types": "^7.23.0" } }, "@babel/highlight": { - "version": "7.22.10", - "resolved": 
"https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.10.tgz", - "integrity": "sha512-78aUtVcT7MUscr0K5mIEnkwxPE0MaxkR5RxRwuHaQ+JuU5AmTPhY+do2mdzVTnIJJpyBglql2pehuBIWHug+WQ==", + "version": "7.22.20", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.20.tgz", + "integrity": "sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg==", "requires": { - "@babel/helper-validator-identifier": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.20", "chalk": "^2.4.2", "js-tokens": "^4.0.0" }, @@ -11468,52 +11749,52 @@ } }, "@babel/parser": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.10.tgz", - "integrity": "sha512-lNbdGsQb9ekfsnjFGhEiF4hfFqGgfOP3H3d27re3n+CGhNuTSUEQdfWk556sTLNTloczcdM5TYF2LhzmDQKyvQ==" + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.23.0.tgz", + "integrity": "sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw==" }, "@babel/runtime": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.22.10.tgz", - "integrity": "sha512-21t/fkKLMZI4pqP2wlmsQAWnYW1PDyKyyUV4vCi+B25ydmdaYTKXPwCj0BzSUnZf4seIiYvSA3jcZ3gdsMFkLQ==", + "version": "7.23.1", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.1.tgz", + "integrity": "sha512-hC2v6p8ZSI/W0HUzh3V8C5g+NwSKzKPtJwSpTjwl0o297GP9+ZLQSkdvHz46CM3LqyoXxq+5G9komY+eSqSO0g==", "requires": { "regenerator-runtime": "^0.14.0" } }, "@babel/template": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", - "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", + "version": "7.22.15", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.15.tgz", + "integrity": 
"sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==", "requires": { - "@babel/code-frame": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/code-frame": "^7.22.13", + "@babel/parser": "^7.22.15", + "@babel/types": "^7.22.15" } }, "@babel/traverse": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.10.tgz", - "integrity": "sha512-Q/urqV4pRByiNNpb/f5OSv28ZlGJiFiiTh+GAHktbIrkPhPbl90+uW6SmpoLyZqutrg9AEaEf3Q/ZBRHBXgxig==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.23.0.tgz", + "integrity": "sha512-t/QaEvyIoIkwzpiZ7aoSKK8kObQYeF7T2v+dazAYCb8SXtp58zEVkWW7zAnju8FNKNdr4ScAOEDmMItbyOmEYw==", "requires": { - "@babel/code-frame": "^7.22.10", - "@babel/generator": "^7.22.10", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", + "@babel/code-frame": "^7.22.13", + "@babel/generator": "^7.23.0", + "@babel/helper-environment-visitor": "^7.22.20", + "@babel/helper-function-name": "^7.23.0", "@babel/helper-hoist-variables": "^7.22.5", "@babel/helper-split-export-declaration": "^7.22.6", - "@babel/parser": "^7.22.10", - "@babel/types": "^7.22.10", + "@babel/parser": "^7.23.0", + "@babel/types": "^7.23.0", "debug": "^4.1.0", "globals": "^11.1.0" } }, "@babel/types": { - "version": "7.22.10", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.10.tgz", - "integrity": "sha512-obaoigiLrlDZ7TUQln/8m4mSqIW2QFeOrCQc9r+xsaHGNoplVNYlRVpsfE8Vj35GEm2ZH4ZhrNYogs/3fj85kg==", + "version": "7.23.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.23.0.tgz", + "integrity": "sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg==", "requires": { "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.20", "to-fast-properties": 
"^2.0.0" } }, @@ -11764,34 +12045,34 @@ "optional": true }, "@floating-ui/core": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.4.1.tgz", - "integrity": "sha512-jk3WqquEJRlcyu7997NtR5PibI+y5bi+LS3hPmguVClypenMsCY3CBa3LAQnozRCtCrYWSEtAdiskpamuJRFOQ==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.5.0.tgz", + "integrity": "sha512-kK1h4m36DQ0UHGj5Ah4db7R0rHemTqqO0QLvUqi1/mUUp3LuAWbWxdxSIf/XsnH9VS6rRVPLJCncjRzUvyCLXg==", "requires": { - "@floating-ui/utils": "^0.1.1" + "@floating-ui/utils": "^0.1.3" } }, "@floating-ui/dom": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.1.tgz", - "integrity": "sha512-KwvVcPSXg6mQygvA1TjbN/gh///36kKtllIF8SUm0qpFj8+rvYrpvlYdL1JoA71SHpDqgSSdGOSoQ0Mp3uY5aw==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.5.3.tgz", + "integrity": "sha512-ClAbQnEqJAKCJOEbbLo5IUlZHkNszqhuxS4fHAVxRPXPya6Ysf2G8KypnYcOTpx6I8xcgF9bbHb6g/2KpbV8qA==", "requires": { - "@floating-ui/core": "^1.4.1", - "@floating-ui/utils": "^0.1.1" + "@floating-ui/core": "^1.4.2", + "@floating-ui/utils": "^0.1.3" } }, "@floating-ui/react-dom": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.1.tgz", - "integrity": "sha512-rZtAmSht4Lry6gdhAJDrCp/6rKN7++JnL1/Anbr/DdeyYXQPxvg/ivrbYvJulbRf4vL8b212suwMM2lxbv+RQA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.0.2.tgz", + "integrity": "sha512-5qhlDvjaLmAst/rKb3VdlCinwTF4EYMiVxuuc/HVUjs46W0zgtbMmAZ1UTsDrRTxRmUEzl92mOtWbeeXL26lSQ==", "requires": { - "@floating-ui/dom": "^1.3.0" + "@floating-ui/dom": "^1.5.1" } }, "@floating-ui/utils": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.1.tgz", - "integrity": "sha512-m0G6wlnhm/AX0H12IOWtK8gASEMffnX08RtKkCgTdHb9JpHKGloI7icFfLg9ZmQeavcvR0PKmzxClyuFPSjKWw==" + 
"version": "0.1.4", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.1.4.tgz", + "integrity": "sha512-qprfWkn82Iw821mcKofJ5Pk9wgioHicxcQMxx+5zt5GSKoqdWvgG5AxVmpmUUjzTLPVSH5auBrhI93Deayn/DA==" }, "@headlessui/react": { "version": "1.7.17", @@ -11858,18 +12139,17 @@ } }, "@mui/base": { - "version": "5.0.0-beta.11", - "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.11.tgz", - "integrity": "sha512-FdKZGPd8qmC3ZNke7CNhzcEgToc02M6WYZc9hcBsNQ17bgAd3s9F//1bDDYgMVBYxDM71V0sv/hBHlOY4I1ZVA==", + "version": "5.0.0-beta.17", + "resolved": "https://registry.npmjs.org/@mui/base/-/base-5.0.0-beta.17.tgz", + "integrity": "sha512-xNbk7iOXrglNdIxFBN0k3ySsPIFLWCnFxqsAYl7CIcDkD9low4kJ7IUuy6ctwx/HAy2fenrT3KXHr1sGjAMgpQ==", "requires": { - "@babel/runtime": "^7.22.6", - "@emotion/is-prop-valid": "^1.2.1", + "@babel/runtime": "^7.22.15", + "@floating-ui/react-dom": "^2.0.2", "@mui/types": "^7.2.4", - "@mui/utils": "^5.14.5", + "@mui/utils": "^5.14.11", "@popperjs/core": "^2.11.8", "clsx": "^2.0.0", - "prop-types": "^15.8.1", - "react-is": "^18.2.0" + "prop-types": "^15.8.1" }, "dependencies": { "clsx": { @@ -11880,21 +12160,21 @@ } }, "@mui/core-downloads-tracker": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.14.5.tgz", - "integrity": "sha512-+wpGH1USwPcKMFPMvXqYPC6fEvhxM3FzxC8lyDiNK/imLyyJ6y2DPb1Oue7OGIKJWBmYBqrWWtfovrxd1aJHTA==" + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/core-downloads-tracker/-/core-downloads-tracker-5.14.11.tgz", + "integrity": "sha512-uY8FLQURhXe3f3O4dS5OSGML9KDm9+IE226cBu78jarVIzdQGPlXwGIlSI9VJR8MvZDA6C0+6XfWDhWCHruC5Q==" }, "@mui/material": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/material/-/material-5.14.5.tgz", - "integrity": "sha512-4qa4GMfuZH0Ai3mttk5ccXP8a3sf7aPlAJwyMrUSz6h9hPri6BPou94zeu3rENhhmKLby9S/W1y+pmficy8JKA==", + "version": "5.14.11", + "resolved": 
"https://registry.npmjs.org/@mui/material/-/material-5.14.11.tgz", + "integrity": "sha512-DnSdJzcR7lwG12JA5L2t8JF+RDzMygu5rCNW+logWb/KW2/TRzwLyVWO+CorHTBjBRd38DBxnwOCDiYkDd+N3A==", "requires": { - "@babel/runtime": "^7.22.6", - "@mui/base": "5.0.0-beta.11", - "@mui/core-downloads-tracker": "^5.14.5", - "@mui/system": "^5.14.5", + "@babel/runtime": "^7.22.15", + "@mui/base": "5.0.0-beta.17", + "@mui/core-downloads-tracker": "^5.14.11", + "@mui/system": "^5.14.11", "@mui/types": "^7.2.4", - "@mui/utils": "^5.14.5", + "@mui/utils": "^5.14.11", "@types/react-transition-group": "^4.4.6", "clsx": "^2.0.0", "csstype": "^3.1.2", @@ -11911,36 +12191,36 @@ } }, "@mui/private-theming": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.14.5.tgz", - "integrity": "sha512-cC4C5RrpXpDaaZyH9QwmPhRLgz+f2SYbOty3cPkk4qPSOSfif2ZEcDD9HTENKDDd9deB+xkPKzzZhi8cxIx8Ig==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/private-theming/-/private-theming-5.14.11.tgz", + "integrity": "sha512-MSnNNzTu9pfKLCKs1ZAKwOTgE4bz+fQA0fNr8Jm7NDmuWmw0CaN9Vq2/MHsatE7+S0A25IAKby46Uv1u53rKVQ==", "requires": { - "@babel/runtime": "^7.22.6", - "@mui/utils": "^5.14.5", + "@babel/runtime": "^7.22.15", + "@mui/utils": "^5.14.11", "prop-types": "^15.8.1" } }, "@mui/styled-engine": { - "version": "5.13.2", - "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.13.2.tgz", - "integrity": "sha512-VCYCU6xVtXOrIN8lcbuPmoG+u7FYuOERG++fpY74hPpEWkyFQG97F+/XfTQVYzlR2m7nPjnwVUgATcTCMEaMvw==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/styled-engine/-/styled-engine-5.14.11.tgz", + "integrity": "sha512-jdUlqRgTYQ8RMtPX4MbRZqar6W2OiIb6J5KEFbIu4FqvPrk44Each4ppg/LAqp1qNlBYq5i+7Q10MYLMpDxX9A==", "requires": { - "@babel/runtime": "^7.21.0", + "@babel/runtime": "^7.22.15", "@emotion/cache": "^11.11.0", "csstype": "^3.1.2", "prop-types": "^15.8.1" } }, "@mui/system": { - "version": "5.14.5", - 
"resolved": "https://registry.npmjs.org/@mui/system/-/system-5.14.5.tgz", - "integrity": "sha512-mextXZHDeGcR7E1kx43TRARrVXy+gI4wzpUgNv7MqZs1dvTVXQGVeAT6ydj9d6FUqHBPMNLGV/21vJOrpqsL+w==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/system/-/system-5.14.11.tgz", + "integrity": "sha512-yl8xV+y0k7j6dzBsHabKwoShmjqLa8kTxrhUI3JpqLG358VRVMJRW/ES0HhvfcCi4IVXde+Tc2P3K1akGL8zoA==", "requires": { - "@babel/runtime": "^7.22.6", - "@mui/private-theming": "^5.14.5", - "@mui/styled-engine": "^5.13.2", + "@babel/runtime": "^7.22.15", + "@mui/private-theming": "^5.14.11", + "@mui/styled-engine": "^5.14.11", "@mui/types": "^7.2.4", - "@mui/utils": "^5.14.5", + "@mui/utils": "^5.14.11", "clsx": "^2.0.0", "csstype": "^3.1.2", "prop-types": "^15.8.1" @@ -11960,13 +12240,12 @@ "requires": {} }, "@mui/utils": { - "version": "5.14.5", - "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.14.5.tgz", - "integrity": "sha512-6Hzw63VR9C5xYv+CbjndoRLU6Gntal8rJ5W+GUzkyHrGWIyYPWZPa6AevnyGioySNETATe1H9oXS8f/7qgIHJA==", + "version": "5.14.11", + "resolved": "https://registry.npmjs.org/@mui/utils/-/utils-5.14.11.tgz", + "integrity": "sha512-fmkIiCPKyDssYrJ5qk+dime1nlO3dmWfCtaPY/uVBqCRMBZ11JhddB9m8sjI2mgqQQwRJG5bq3biaosNdU/s4Q==", "requires": { - "@babel/runtime": "^7.22.6", + "@babel/runtime": "^7.22.15", "@types/prop-types": "^15.7.5", - "@types/react-is": "^18.2.1", "prop-types": "^15.8.1", "react-is": "^18.2.0" } @@ -11994,6 +12273,15 @@ "fastq": "^1.6.0" } }, + "@playwright/test": { + "version": "1.38.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.38.1.tgz", + "integrity": "sha512-NqRp8XMwj3AK+zKLbZShl0r/9wKgzqI/527bkptKXomtuo+dOjU9NdMASQ8DNC9z9zLOMbG53T4eihYr3XR+BQ==", + "dev": true, + "requires": { + "playwright": "1.38.1" + } + }, "@popperjs/core": { "version": "2.11.8", "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", @@ -12102,19 +12390,19 @@ } }, "@radix-ui/react-dialog": { - "version": "1.0.4", - 
"resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.4.tgz", - "integrity": "sha512-hJtRy/jPULGQZceSAP2Re6/4NpKo8im6V8P2hUqZsdFiSL8l35kYsw3qbRI6Ay5mQd2+wlLqje770eq+RJ3yZg==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.0.5.tgz", + "integrity": "sha512-GjWJX/AUpB703eEBanuBnIWdIXg6NvJFCXcNlSZk4xdszCdhrJgBoUd1cGk67vFO+WdA2pfI/plOpqz/5GUP6Q==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", @@ -12132,9 +12420,9 @@ } }, "@radix-ui/react-dismissable-layer": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz", - "integrity": "sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.5.tgz", + "integrity": "sha512-aJeDjQhywg9LBu2t/At58hCvr7pEm0o2Ke1x33B+MhjNmmZ17sy4KImo0KPLgsnc/zN7GPdce8Cnn0SWvwZO7g==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -12145,16 +12433,16 @@ } }, "@radix-ui/react-dropdown-menu": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.0.5.tgz", - "integrity": "sha512-xdOrZzOTocqqkCkYo8yRPCib5OkTkqN7lqNCdxwPOdE466DOaNl4N8PkUIlsXthQvW5Wwkd+aEmWpfWlBoDPEw==", + "version": "2.0.6", + 
"resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.0.6.tgz", + "integrity": "sha512-i6TuFOoWmLWq+M/eCLGd/bQ2HfAX1RJgvrBQ6AQLmzfvsLdefxbWu8G9zczcPFfcSPehz9GcpF6K9QYreFV8hA==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-menu": "2.0.5", + "@radix-ui/react-menu": "2.0.6", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-use-controllable-state": "1.0.1" } @@ -12168,9 +12456,9 @@ } }, "@radix-ui/react-focus-scope": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz", - "integrity": "sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.4.tgz", + "integrity": "sha512-sL04Mgvf+FmyvZeYfNu1EPAaaxD+aw7cYeIB9L9Fvq8+urhltTRaEo5ysKOpHuKPclsZcSUMKlN05x4u+CINpA==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/react-compose-refs": "1.0.1", @@ -12217,9 +12505,9 @@ } }, "@radix-ui/react-menu": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.0.5.tgz", - "integrity": "sha512-Gw4f9pwdH+w5w+49k0gLjN0PfRDHvxmAgG16AbyJZ7zhwZ6PBHKtWohvnSwfusfnK3L68dpBREHpVkj8wEM7ZA==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.0.6.tgz", + "integrity": "sha512-BVkFLS+bUC8HcImkRKPSiVumA1VPOOEC5WBMiT+QAVsPzW1FJzI9KnqgGxVDPBcql5xXrHkD3JOVoXWEXD8SYA==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -12227,12 +12515,12 @@ "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", "@radix-ui/react-direction": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": 
"1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-roving-focus": "1.0.4", @@ -12243,9 +12531,9 @@ } }, "@radix-ui/react-menubar": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menubar/-/react-menubar-1.0.3.tgz", - "integrity": "sha512-GqjdxzYCjjKhcgEODDP8SrYfbWNh/Hm3lyuFkP5Q5IbX0QfXklLF1o1AqA3oTV2kulUgN/kOZVS92hIIShEgpA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menubar/-/react-menubar-1.0.4.tgz", + "integrity": "sha512-bHgUo9gayKZfaQcWSSLr++LyS0rgh+MvD89DE4fJ6TkGHvjHgPaBZf44hdka7ogOxIOdj9163J+5xL2Dn4qzzg==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", @@ -12254,27 +12542,27 @@ "@radix-ui/react-context": "1.0.1", "@radix-ui/react-direction": "1.0.1", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-menu": "2.0.5", + "@radix-ui/react-menu": "2.0.6", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-roving-focus": "1.0.4", "@radix-ui/react-use-controllable-state": "1.0.1" } }, "@radix-ui/react-popover": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.6.tgz", - "integrity": "sha512-cZ4defGpkZ0qTRtlIBzJLSzL6ht7ofhhW4i1+pkemjV1IKXm0wgCRnee154qlV6r9Ttunmh2TNZhMfV2bavUyA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.0.7.tgz", + "integrity": "sha512-shtvVnlsxT6faMnK/a7n0wptwBD23xc1Z5mdrtKLwVEfsEMXodS0r5s0/g5P0hX//EKYZS2sxUjqfzlg52ZSnQ==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", 
- "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-focus-guards": "1.0.1", - "@radix-ui/react-focus-scope": "1.0.3", + "@radix-ui/react-focus-scope": "1.0.4", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", @@ -12284,9 +12572,9 @@ } }, "@radix-ui/react-popper": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz", - "integrity": "sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.3.tgz", + "integrity": "sha512-cKpopj/5RHZWjrbF2846jBNacjQVwkP068DfmgrNJXpvVWrOvlAmE9xSiy5OqeE+Gi8D9fP+oDhUnPqNMY8/5w==", "requires": { "@babel/runtime": "^7.13.10", "@floating-ui/react-dom": "^2.0.0", @@ -12302,9 +12590,9 @@ } }, "@radix-ui/react-portal": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz", - "integrity": "sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.4.tgz", + "integrity": "sha512-Qki+C/EuGUVCQTOTD5vzJzJuMUlewbzuKyUy+/iHM2uwGiru9gZeBJtHAPKAEkB5KWGi9mP/CHKcY0wt1aW45Q==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/react-primitive": "1.0.3" @@ -12383,6 +12671,59 @@ "@radix-ui/react-visually-hidden": "1.0.3", "aria-hidden": "^1.1.1", "react-remove-scroll": "2.5.5" + }, + "dependencies": { + "@radix-ui/react-dismissable-layer": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.4.tgz", + "integrity": "sha512-7UpBa/RKMoHJYjie1gkF1DlK8l1fdU/VKDpoS3rCCo8YBJR294GwcEHyxHw72yvphJ7ld0AXEcSLAzY2F/WyCg==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/primitive": "1.0.1", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-escape-keydown": "1.0.3" + } + }, + "@radix-ui/react-focus-scope": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.0.3.tgz", + "integrity": "sha512-upXdPfqI4islj2CslyfUBNlaJCPybbqRHAi1KER7Isel9Q2AtSJ0zRBZv8mWQiFXD2nyAJ4BhC3yXgZ6kMBSrQ==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1" + } + }, + "@radix-ui/react-popper": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.1.2.tgz", + "integrity": "sha512-1CnGGfFi/bbqtJZZ0P/NQY20xdG3E0LALJaLUEoKwPLwl6PPPfbeiCqMVQnhoFRAxjJj4RpBRJzDmUgsex2tSg==", + "requires": { + "@babel/runtime": "^7.13.10", + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.0.3", + "@radix-ui/react-compose-refs": "1.0.1", + "@radix-ui/react-context": "1.0.1", + "@radix-ui/react-primitive": "1.0.3", + "@radix-ui/react-use-callback-ref": "1.0.1", + "@radix-ui/react-use-layout-effect": "1.0.1", + "@radix-ui/react-use-rect": "1.0.1", + "@radix-ui/react-use-size": "1.0.1", + "@radix-ui/rect": "1.0.1" + } + }, + "@radix-ui/react-portal": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.0.3.tgz", + "integrity": "sha512-xLYZeHrWoPmA5mEKEfZZevoVRK/Q43GfzRXkWV6qawIWWK8t6ifIiLQdd7rmQ4Vk1bmI21XhqF9BN3jWf+phpA==", + "requires": { + "@babel/runtime": "^7.13.10", + "@radix-ui/react-primitive": "1.0.3" + 
} + } } }, "@radix-ui/react-separator": { @@ -12435,18 +12776,18 @@ } }, "@radix-ui/react-tooltip": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.6.tgz", - "integrity": "sha512-DmNFOiwEc2UDigsYj6clJENma58OelxD24O4IODoZ+3sQc3Zb+L8w1EP+y9laTuKCLAysPw4fD6/v0j4KNV8rg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.0.7.tgz", + "integrity": "sha512-lPh5iKNFVQ/jav/j6ZrWq3blfDJ0OH9R6FlNUHPMqdLuQ9vwDgFsRxvl8b7Asuy5c8xmoojHUxKHQSOAvMHxyw==", "requires": { "@babel/runtime": "^7.13.10", "@radix-ui/primitive": "1.0.1", "@radix-ui/react-compose-refs": "1.0.1", "@radix-ui/react-context": "1.0.1", - "@radix-ui/react-dismissable-layer": "1.0.4", + "@radix-ui/react-dismissable-layer": "1.0.5", "@radix-ui/react-id": "1.0.1", - "@radix-ui/react-popper": "1.1.2", - "@radix-ui/react-portal": "1.0.3", + "@radix-ui/react-popper": "1.1.3", + "@radix-ui/react-portal": "1.0.4", "@radix-ui/react-presence": "1.0.1", "@radix-ui/react-primitive": "1.0.3", "@radix-ui/react-slot": "1.0.2", @@ -12532,29 +12873,29 @@ } }, "@reactflow/background": { - "version": "11.2.7", - "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.2.7.tgz", - "integrity": "sha512-cI9nE4B/1BCASbFpCsyjy45qOWBCnog0rsDqI0Vrv41rDj4MVYHZ2ZegyvgdPPQluoGB9fwwVtRQA4m1I9RSSw==", + "version": "11.3.1", + "resolved": "https://registry.npmjs.org/@reactflow/background/-/background-11.3.1.tgz", + "integrity": "sha512-FjYHXvzwwaCJJS5rRxXbU4r8xbAqEvLQkO1l4czjWgrE0VQELC93RKVI4lRpkv2RkkIwRNMvoGb6kXNbJWiU/g==", "requires": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.3", "zustand": "^4.4.1" } }, "@reactflow/controls": { - "version": "11.1.18", - "resolved": "https://registry.npmjs.org/@reactflow/controls/-/controls-11.1.18.tgz", - "integrity": "sha512-sBfS7mZyyEzJNhCu0Zg4PDSoYI6XzWybyVRE0n2YKTwpXxE2Yy7eag35mloHjbDV+knNZRjsXl87bALmq08AoA==", + "version": 
"11.2.1", + "resolved": "https://registry.npmjs.org/@reactflow/controls/-/controls-11.2.1.tgz", + "integrity": "sha512-mW8kz/J77tc5uRwSTSUH1OtkksaVc52Dbz7aTuF0aKZXwPHoAfRPFeet2VLQV1IdKeSab82Mqy0WTSJj6Hl15w==", "requires": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.3", "zustand": "^4.4.1" } }, "@reactflow/core": { - "version": "11.8.2", - "resolved": "https://registry.npmjs.org/@reactflow/core/-/core-11.8.2.tgz", - "integrity": "sha512-Wywa5jDlojorOkET+PxLqlFnIt0PrnbYqrfQEu4I9+ZWII0TWp84YPxF5j63Cd+92vTWAzgFrVMVNwda6nU/rg==", + "version": "11.9.1", + "resolved": "https://registry.npmjs.org/@reactflow/core/-/core-11.9.1.tgz", + "integrity": "sha512-qXWwB2KSWkp3MH3fhY1HL4yFBvMZ+7y9mkfk0we7wCuGNgQzWVJDwvVEpPTkyp5xyxX53sSF3UDC+UaFCB4ElA==", "requires": { "@types/d3": "^7.4.0", "@types/d3-drag": "^3.0.1", @@ -12568,11 +12909,11 @@ } }, "@reactflow/minimap": { - "version": "11.6.2", - "resolved": "https://registry.npmjs.org/@reactflow/minimap/-/minimap-11.6.2.tgz", - "integrity": "sha512-rJvIDv6NkfNgHEEr+znGVo5gVhgkXNImzo5FNCKenMQI4x8BSGxQGaADfr+f/3KOmMq9PpvjEHsrLqB0VdPaCg==", + "version": "11.7.1", + "resolved": "https://registry.npmjs.org/@reactflow/minimap/-/minimap-11.7.1.tgz", + "integrity": "sha512-ZNHx1wURS055kYYaiFhrmGYI2UXA7Cwcjxfww4Mq3PWvIIaEFL9t34aUWoXCYm+NPFHElwC8jM7Vyp7xDR9oyg==", "requires": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "@types/d3-selection": "^3.0.3", "@types/d3-zoom": "^3.0.1", "classcat": "^5.0.3", @@ -12582,11 +12923,11 @@ } }, "@reactflow/node-resizer": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@reactflow/node-resizer/-/node-resizer-2.1.4.tgz", - "integrity": "sha512-9oNnNvlylnsyBBvZQITw1MyMZfYWHhu+GkqboIsrgOdaYwURkuRfhMFaQb+j0rCSjoq6AhbJbGWkZiuonAgELA==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@reactflow/node-resizer/-/node-resizer-2.2.1.tgz", + "integrity": 
"sha512-nUh73WHVAYsJLvgSKqKAxIPU9vUkEM5iold2zQsEOHqjfnk86m1r33fTEiw5+MPqCFHMoGzyZe2gxXUMFqg/3A==", "requires": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.4", "d3-drag": "^3.0.0", "d3-selection": "^3.0.0", @@ -12594,24 +12935,24 @@ } }, "@reactflow/node-toolbar": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@reactflow/node-toolbar/-/node-toolbar-1.2.6.tgz", - "integrity": "sha512-f+zKVS30XOQStXj+Ry7Yf4FlIB1XWHhrRNG2f2pNUH6FTpDBOvbxiIsQh99LH9vykMJ54fRJwbAjTg24x0UnyA==", + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@reactflow/node-toolbar/-/node-toolbar-1.3.1.tgz", + "integrity": "sha512-YkrpB2+/9ymZ2h1nmuKytmrFlIXdiDzeDqmZnyJF2UNH/4sSo/GswPJmqiA8ETOP2oEdIkNmO9dwVpiDvWtZ5A==", "requires": { - "@reactflow/core": "11.8.2", + "@reactflow/core": "11.9.1", "classcat": "^5.0.3", "zustand": "^4.4.1" } }, "@remix-run/router": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.8.0.tgz", - "integrity": "sha512-mrfKqIHnSZRyIzBcanNJmVQELTnX+qagEDlcKO90RgRBVOZGSGvZKeDihTRfWcqoDn5N/NkUcwWTccnpN18Tfg==" + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.9.0.tgz", + "integrity": "sha512-bV63itrKBC0zdT27qYm6SDZHlkXwFL1xMBuhkn+X7l0+IIhNaH5wuuvZKp6eKhCD4KFhujhfhCT1YxXW6esUIA==" }, "@rollup/pluginutils": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.3.tgz", - "integrity": "sha512-hfllNN4a80rwNQ9QCxhxuHCGHMAvabXqxNdaChUSSadMre7t4iEUI6fFAhBOn/eIYTgYVhBv7vCLsAJ4u3lf3g==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.0.4.tgz", + "integrity": "sha512-0KJnIoRI8A+a1dqOYLxH8vBf8bphDmty5QvIm2hqm7oFCFYKCAZWWd2hXgMibaPsNDhI0AtpYfQZJG47pt/k4g==", "requires": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", @@ -12625,96 +12966,97 @@ "dev": true }, "@svgr/babel-plugin-add-jsx-attribute": { - "version": "7.0.0", - 
"resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-7.0.0.tgz", - "integrity": "sha512-khWbXesWIP9v8HuKCl2NU2HNAyqpSQ/vkIl36Nbn4HIwEYSRWL0H7Gs6idJdha2DkpFDWlsqMELvoCE8lfFY6Q==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz", + "integrity": "sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==", "requires": {} }, "@svgr/babel-plugin-remove-jsx-attribute": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-7.0.0.tgz", - "integrity": "sha512-iiZaIvb3H/c7d3TH2HBeK91uI2rMhZNwnsIrvd7ZwGLkFw6mmunOCoVnjdYua662MqGFxlN9xTq4fv9hgR4VXQ==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz", + "integrity": "sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA==", "requires": {} }, "@svgr/babel-plugin-remove-jsx-empty-expression": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-7.0.0.tgz", - "integrity": "sha512-sQQmyo+qegBx8DfFc04PFmIO1FP1MHI1/QEpzcIcclo5OAISsOJPW76ZIs0bDyO/DBSJEa/tDa1W26pVtt0FRw==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz", + "integrity": "sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA==", "requires": {} }, "@svgr/babel-plugin-replace-jsx-attribute-value": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-7.0.0.tgz", - "integrity": 
"sha512-i6MaAqIZXDOJeikJuzocByBf8zO+meLwfQ/qMHIjCcvpnfvWf82PFvredEZElErB5glQFJa2KVKk8N2xV6tRRA==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz", + "integrity": "sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ==", "requires": {} }, "@svgr/babel-plugin-svg-dynamic-title": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-7.0.0.tgz", - "integrity": "sha512-BoVSh6ge3SLLpKC0pmmN9DFlqgFy4NxNgdZNLPNJWBUU7TQpDWeBuyVuDW88iXydb5Cv0ReC+ffa5h3VrKfk1w==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz", + "integrity": "sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og==", "requires": {} }, "@svgr/babel-plugin-svg-em-dimensions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-7.0.0.tgz", - "integrity": "sha512-tNDcBa+hYn0gO+GkP/AuNKdVtMufVhU9fdzu+vUQsR18RIJ9RWe7h/pSBY338RO08wArntwbDk5WhQBmhf2PaA==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz", + "integrity": "sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g==", "requires": {} }, "@svgr/babel-plugin-transform-react-native-svg": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-7.0.0.tgz", - "integrity": "sha512-qw54u8ljCJYL2KtBOjI5z7Nzg8LnSvQOP5hPKj77H4VQL4+HdKbAT5pnkkZLmHKYwzsIHSYKXxHouD8zZamCFQ==", + "version": "8.1.0", + "resolved": 
"https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz", + "integrity": "sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q==", "requires": {} }, "@svgr/babel-plugin-transform-svg-component": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-7.0.0.tgz", - "integrity": "sha512-CcFECkDj98daOg9jE3Bh3uyD9kzevCAnZ+UtzG6+BQG/jOQ2OA3jHnX6iG4G1MCJkUQFnUvEv33NvQfqrb/F3A==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz", + "integrity": "sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw==", "requires": {} }, "@svgr/babel-preset": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-7.0.0.tgz", - "integrity": "sha512-EX/NHeFa30j5UjldQGVQikuuQNHUdGmbh9kEpBKofGUtF0GUPJ4T4rhoYiqDAOmBOxojyot36JIFiDUHUK1ilQ==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-8.1.0.tgz", + "integrity": "sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug==", "requires": { - "@svgr/babel-plugin-add-jsx-attribute": "^7.0.0", - "@svgr/babel-plugin-remove-jsx-attribute": "^7.0.0", - "@svgr/babel-plugin-remove-jsx-empty-expression": "^7.0.0", - "@svgr/babel-plugin-replace-jsx-attribute-value": "^7.0.0", - "@svgr/babel-plugin-svg-dynamic-title": "^7.0.0", - "@svgr/babel-plugin-svg-em-dimensions": "^7.0.0", - "@svgr/babel-plugin-transform-react-native-svg": "^7.0.0", - "@svgr/babel-plugin-transform-svg-component": "^7.0.0" + "@svgr/babel-plugin-add-jsx-attribute": "8.0.0", + "@svgr/babel-plugin-remove-jsx-attribute": "8.0.0", + "@svgr/babel-plugin-remove-jsx-empty-expression": "8.0.0", + 
"@svgr/babel-plugin-replace-jsx-attribute-value": "8.0.0", + "@svgr/babel-plugin-svg-dynamic-title": "8.0.0", + "@svgr/babel-plugin-svg-em-dimensions": "8.0.0", + "@svgr/babel-plugin-transform-react-native-svg": "8.1.0", + "@svgr/babel-plugin-transform-svg-component": "8.0.0" } }, "@svgr/core": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/core/-/core-7.0.0.tgz", - "integrity": "sha512-ztAoxkaKhRVloa3XydohgQQCb0/8x9T63yXovpmHzKMkHO6pkjdsIAWKOS4bE95P/2quVh1NtjSKlMRNzSBffw==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/core/-/core-8.1.0.tgz", + "integrity": "sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==", "requires": { "@babel/core": "^7.21.3", - "@svgr/babel-preset": "^7.0.0", + "@svgr/babel-preset": "8.1.0", "camelcase": "^6.2.0", - "cosmiconfig": "^8.1.3" + "cosmiconfig": "^8.1.3", + "snake-case": "^3.0.4" }, "dependencies": { "cosmiconfig": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.2.0.tgz", - "integrity": "sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==", + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "requires": { - "import-fresh": "^3.2.1", + "import-fresh": "^3.3.0", "js-yaml": "^4.1.0", - "parse-json": "^5.0.0", + "parse-json": "^5.2.0", "path-type": "^4.0.0" } } } }, "@svgr/hast-util-to-babel-ast": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-7.0.0.tgz", - "integrity": "sha512-42Ej9sDDEmsJKjrfQ1PHmiDiHagh/u9AHO9QWbeNx4KmD9yS5d1XHmXUNINfUcykAU+4431Cn+k6Vn5mWBYimQ==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz", + "integrity": 
"sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q==", "requires": { "@babel/types": "^7.21.3", "entities": "^4.4.0" @@ -12728,13 +13070,13 @@ } }, "@svgr/plugin-jsx": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-7.0.0.tgz", - "integrity": "sha512-SWlTpPQmBUtLKxXWgpv8syzqIU8XgFRvyhfkam2So8b3BE0OS0HPe5UfmlJ2KIC+a7dpuuYovPR2WAQuSyMoPw==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz", + "integrity": "sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA==", "requires": { "@babel/core": "^7.21.3", - "@svgr/babel-preset": "^7.0.0", - "@svgr/hast-util-to-babel-ast": "^7.0.0", + "@svgr/babel-preset": "8.1.0", + "@svgr/hast-util-to-babel-ast": "8.0.0", "svg-parser": "^2.0.4" } }, @@ -12761,93 +13103,107 @@ } }, "@swc/core": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.77.tgz", - "integrity": "sha512-CiLD2NGTdhE8JnWFHeRAglaCAcvwOxvpeWNtCIT261GrxTKCXHPAn4eqIWiBzXnwWDmZ6XdyrCL4/GmPESNnrg==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.89.tgz", + "integrity": "sha512-+FchWateF57g50ChX6++QQDwgVd6iWZX5HA6m9LRIdJIB56bIqbwRQDwVL3Q8Rlbry4kmw+RxiOW2FjAx9mQOQ==", "dev": true, "requires": { - "@swc/core-darwin-arm64": "1.3.77", - "@swc/core-darwin-x64": "1.3.77", - "@swc/core-linux-arm-gnueabihf": "1.3.77", - "@swc/core-linux-arm64-gnu": "1.3.77", - "@swc/core-linux-arm64-musl": "1.3.77", - "@swc/core-linux-x64-gnu": "1.3.77", - "@swc/core-linux-x64-musl": "1.3.77", - "@swc/core-win32-arm64-msvc": "1.3.77", - "@swc/core-win32-ia32-msvc": "1.3.77", - "@swc/core-win32-x64-msvc": "1.3.77" + "@swc/core-darwin-arm64": "1.3.89", + "@swc/core-darwin-x64": "1.3.89", + "@swc/core-linux-arm-gnueabihf": "1.3.89", + "@swc/core-linux-arm64-gnu": "1.3.89", + "@swc/core-linux-arm64-musl": "1.3.89", + 
"@swc/core-linux-x64-gnu": "1.3.89", + "@swc/core-linux-x64-musl": "1.3.89", + "@swc/core-win32-arm64-msvc": "1.3.89", + "@swc/core-win32-ia32-msvc": "1.3.89", + "@swc/core-win32-x64-msvc": "1.3.89", + "@swc/counter": "^0.1.1", + "@swc/types": "^0.1.5" } }, "@swc/core-darwin-arm64": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.77.tgz", - "integrity": "sha512-l4KGQAGB4Ih1Al2tWoUBrtVJCF/xZRjH3jCMCRD52KZDRAnRVDq42JKek7+aHjjH8juzTISaqzsI8Ipv6zvKhA==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.89.tgz", + "integrity": "sha512-LVCZQ2yGrX2678uMvW66IF1bzcOMqiABi+ioNDnJtAIsE/zRVMEYp1ivbOrH32FmPplBby6CGgJIOT3P4VaP1g==", "dev": true, "optional": true }, "@swc/core-darwin-x64": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.77.tgz", - "integrity": "sha512-eFCkZg/BzObOn5IWn7t/Ywz+jlZKff/1XBymT7Arh/UkO39Agh+rYdBqjbylp4JQMl0qGRBfxD3wPgDRoViNVQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.89.tgz", + "integrity": "sha512-IwKlX65YrPBF3urOxBJia0PjnZeaICnCkSwGLiYyV1RhM8XwZ/XyEDTBEsdph3WxUM5wCZQSk8UY/d0saIsX9w==", "dev": true, "optional": true }, "@swc/core-linux-arm-gnueabihf": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.77.tgz", - "integrity": "sha512-+1BueyGcCQAtxSORJml0CU8aKQNssQ5E3ABMFJwCbcec+lUCiGYK1fBfqj4FmWQMbXuQ+mn1SMeXSZAtaXoQ3w==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.89.tgz", + "integrity": "sha512-u5qAPh7NkKoDJYwfaB5zuRvzW2+A89CQQHp5xcYjpctRsk3sUrPmC7vNeE12xipBNKLujIG59ppbrf6Pkp5XIg==", "dev": true, "optional": true }, "@swc/core-linux-arm64-gnu": { - "version": "1.3.77", - "resolved": 
"https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.77.tgz", - "integrity": "sha512-3smbzVcuuCiWWPFeUIp1c0aAXd+fGsc8x8rUcYvoJAWBgLJ45JymOI5WSUjIybl3rk0prdkbFylZuR0t1Rue3A==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.89.tgz", + "integrity": "sha512-eykuO7XtPltk600HvnnRr1nU5qGk7PeqLmztHA7R2bu2SbtcbCGsewPNcAX5eP8by2VwpGcLPdxaKyqeUwCgoA==", "dev": true, "optional": true }, "@swc/core-linux-arm64-musl": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.77.tgz", - "integrity": "sha512-e81+i4ef5vDeu9AkMY2AamPcmtPVPUqeqq3aNWM1tcHCaUej1DwY4xhRxrd1OvEoYyVBLtiMb5nenF3V9OzXIQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.89.tgz", + "integrity": "sha512-i/65Vt3ljfd6EyR+WWZ5aAjZLTQMIHoR+Ay97jE0kysRn8MEOINu0SWyiEwcdXzRGlt+zkrKYfOxp745sWPDAw==", "dev": true, "optional": true }, "@swc/core-linux-x64-gnu": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.77.tgz", - "integrity": "sha512-gl3+9VESckZ/GYCmGClGgXqB2tAA2MivEV/51Wde+2alo2lPSSujEhxE6Q3TNYkXOLAHSupYyDZ0ou9RfXufOw==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.89.tgz", + "integrity": "sha512-ERETXe68CJRdNkL3EIN62gErh3p6+/6hmz4C0epnYJ4F7QspdW/EOluL1o9bl4dux4Xz0nmBPSZsqfHq/nl1KA==", "dev": true, "optional": true }, "@swc/core-linux-x64-musl": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.77.tgz", - "integrity": "sha512-AqQLZAMYTaNrA4i/Nv/GhXdildDZyRv6xsK8u2actevv5PPjD/69yYB3Z4uaptwh/4ys4W/Y2vnt+OPCNH4OQg==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.89.tgz", + "integrity": 
"sha512-EXiwgU5E/yC5zuJtOXXWv+wMwpe5DR380XhVxIOBG6nFi6MR3O2X37KxeEdQZX8RwN7/KU6kNHeifzEiSvixfA==", "dev": true, "optional": true }, "@swc/core-win32-arm64-msvc": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.77.tgz", - "integrity": "sha512-Wdw++6w7WyavxZ3WruElCrRJ6EO0iHS0Mts4qHnbKgD08GJqIMTZPtZ5qhRe9zCf6sj2rQqhAMf/HKhYrHoF+w==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.89.tgz", + "integrity": "sha512-j7GvkgeOrZlB55MpEwX+6E6KjxwOmwRXpIqMjF11JDIZ0wEwHlBxZhlnQQ58iuI6jL6AJgDH/ktDhMyELoBiHw==", "dev": true, "optional": true }, "@swc/core-win32-ia32-msvc": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.77.tgz", - "integrity": "sha512-ObNVpdtLdXDpmVKuMZh87yBYL4ti64WX95o2j5Oq3r0e0RqwIGqGvPDxvJVEiyCnaXHfl8eSNKWuiOxPHPkMNQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.89.tgz", + "integrity": "sha512-n57nE7d3FXBa3Y2+VoJdPulcUAS0ZGAGVGxFpeM/tZt1MBEN5OvpOSOIp35dK5HAAxAzTPlmqj9KUYnVxLMVKw==", "dev": true, "optional": true }, "@swc/core-win32-x64-msvc": { - "version": "1.3.77", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.77.tgz", - "integrity": "sha512-Ew6jg/qr0v/2ixeJXvIUBuAPMKTz8HRoDBO/nHkvlnDFmkhsyH7h5YwJS1rLBwAEhWuJaVYjYi7cibZTI/QRYQ==", + "version": "1.3.89", + "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.89.tgz", + "integrity": "sha512-6yMAmqgseAwEXFIwurP7CL8yIH8n7/Rg62ooOVSLSWL5O/Pwlpy1WrpoA0eKhgMLLkIrPvNuKaE/rG7c2iNQHA==", "dev": true, "optional": true }, + "@swc/counter": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.1.tgz", + "integrity": 
"sha512-xVRaR4u9hcYjFvcSg71Lz5Bo4//CyjAAfMxa7UsaDSYxAshflUkVJWiyVWrfxC59z2kP1IzI4/1BEpnhI9o3Mw==", + "dev": true + }, + "@swc/types": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", + "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", + "dev": true + }, "@szmarczak/http-timer": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", @@ -12858,23 +13214,23 @@ } }, "@tabler/icons": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-2.30.0.tgz", - "integrity": "sha512-tvtmkI4ALjKThVVORh++sB9JnkFY7eGInKxNy+Df7WVQiF7T85tlvGADzlgX4Ic+CK5MIUzZ0jhOlQ/RRlgXpg==" + "version": "2.35.0", + "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-2.35.0.tgz", + "integrity": "sha512-qW/itKdmFvfGw6mAQ+cZy+2MYTXb0XdGAVhO3obYLJEfsSPMwQRO0S9ckFk1xMQX/Tj7REC3TEmWUBWNi3/o3g==" }, "@tabler/icons-react": { - "version": "2.30.0", - "resolved": "https://registry.npmjs.org/@tabler/icons-react/-/icons-react-2.30.0.tgz", - "integrity": "sha512-aYggXusHW133L4KujJkVf4GIIrjg7tIRHgNf/n37mnoHqMjwNP+PjmVdrBM1Z8Ywx9PKFRlrwM0eUMDcG+I4HA==", + "version": "2.35.0", + "resolved": "https://registry.npmjs.org/@tabler/icons-react/-/icons-react-2.35.0.tgz", + "integrity": "sha512-jK2zgtMF2+LSjtbjYsfer+ryz72TwyJqv3MsmCfEpQwP37u01xvmTlVhJ+ox3bV+trsgjojsldVDuB05JuXLaw==", "requires": { - "@tabler/icons": "2.30.0", + "@tabler/icons": "2.35.0", "prop-types": "^15.7.2" } }, "@tailwindcss/forms": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.4.tgz", - "integrity": "sha512-YAm12D3R7/9Mh4jFbYSMnsd6jG++8KxogWgqs7hbdo/86aWjjlIEvL7+QYdVELmAI0InXTpZqFIg5e7aDVWI2Q==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.6.tgz", + "integrity": 
"sha512-Fw+2BJ0tmAwK/w01tEFL5TiaJBX1NLT1/YbWgvm7ws3Qcn11kiXxzNTEQDMs5V3mQemhB56l3u0i9dwdzSQldA==", "requires": { "mini-svg-data-uri": "^1.2.3" } @@ -12886,9 +13242,9 @@ "requires": {} }, "@tailwindcss/typography": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.9.tgz", - "integrity": "sha512-t8Sg3DyynFysV9f4JDOVISGsjazNb48AeIYQwcL+Bsq5uf4RYL75C1giZ43KISjeDGBaTN3Kxh7Xj/vRSMJUUg==", + "version": "0.5.10", + "resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.10.tgz", + "integrity": "sha512-Pe8BuPJQJd3FfRnm6H0ulKIGoMEQS+Vq01R6M5aCrFB/ccR/shT+0kXLjouGC1gFLm9hopTFN+DMP0pfwRWzPw==", "dev": true, "requires": { "lodash.castarray": "^4.4.0", @@ -12898,9 +13254,9 @@ } }, "@testing-library/dom": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.1.tgz", - "integrity": "sha512-0DGPd9AR3+iDTjGoMpxIkAsUihHZ3Ai6CneU6bRRrffXMgzCdlNk43jTrD2/5LT6CBb3MWTP8v510JzYtahD2w==", + "version": "9.3.3", + "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-9.3.3.tgz", + "integrity": "sha512-fB0R+fa3AUqbLHWyxXa2kGVtf1Fe1ZZFr0Zp6AIbIAzXb2mKbEXl+PCQNUOaq5lbTab5tfctfXRNsWXxa2f7Aw==", "dev": true, "peer": true, "requires": { @@ -13022,9 +13378,9 @@ } }, "@types/aria-query": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.1.tgz", - "integrity": "sha512-XTIieEY+gvJ39ChLcB4If5zHtPxt3Syj5rgZR+e1ctpmK8NjPf0zFqsz4JpLJT0xla9GFDKjy8Cpu331nrmE1Q==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.2.tgz", + "integrity": "sha512-PHKZuMN+K5qgKIWhBodXzQslTo5P+K/6LqeKXS6O/4liIDdZqaX5RXrCK++LAw+y/nptN48YmUMFiQHRSWYwtQ==", "dev": true }, "@types/axios": { @@ -13053,9 +13409,9 @@ "integrity": "sha512-LKVP3cgXBT9RYj+t+9FDKwS5tdI+rPBXaNSkma7hvqy35lc7mAokC2zsqWJH0LaqIt3B962nuYI77hsJoT1gow==" }, "@types/d3": { - "version": "7.4.0", - "resolved": 
"https://registry.npmjs.org/@types/d3/-/d3-7.4.0.tgz", - "integrity": "sha512-jIfNVK0ZlxcuRDKtRS/SypEyOQ6UHaFQBKv032X45VvxSJ6Yi5G9behy9h6tNTHTDGh5Vq+KbmBjUWLgY4meCA==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.1.tgz", + "integrity": "sha512-lBpYmbHTCtFKO1DB1R7E9dXp9/g1F3JXSGOF7iKPZ+wRmYg/Q6tCRHODGOc5Qk25fJRe2PI60EDRf2HLPUncMA==", "requires": { "@types/d3-array": "*", "@types/d3-axis": "*", @@ -13090,67 +13446,67 @@ } }, "@types/d3-array": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.0.5.tgz", - "integrity": "sha512-Qk7fpJ6qFp+26VeQ47WY0mkwXaiq8+76RJcncDEfMc2ocRzXLO67bLFRNI4OX1aGBoPzsM5Y2T+/m1pldOgD+A==" + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.0.8.tgz", + "integrity": "sha512-2xAVyAUgaXHX9fubjcCbGAUOqYfRJN1em1EKR2HfzWBpObZhwfnZKvofTN4TplMqJdFQao61I+NVSai/vnBvDQ==" }, "@types/d3-axis": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.2.tgz", - "integrity": "sha512-uGC7DBh0TZrU/LY43Fd8Qr+2ja1FKmH07q2FoZFHo1eYl8aj87GhfVoY1saJVJiq24rp1+wpI6BvQJMKgQm8oA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.4.tgz", + "integrity": "sha512-ySnjI/7qm+J602VjcejXcqs1hEuu5UBbGaJGp+Cn/yKVc1iS3JueLVpToGdQsS2sqta7tqA/kG4ore/+LH90UA==", "requires": { "@types/d3-selection": "*" } }, "@types/d3-brush": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.2.tgz", - "integrity": "sha512-2TEm8KzUG3N7z0TrSKPmbxByBx54M+S9lHoP2J55QuLU0VSQ9mE96EJSAOVNEqd1bbynMjeTS9VHmz8/bSw8rA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.4.tgz", + "integrity": "sha512-Kg5uIsdJNMCs5lTqeZFsTKqj9lBvpiFRDkYN3j2CDlPhonNDg9/gXVpv1E/MKh3tEqArryIj9o6RBGE/MQe+6Q==", "requires": { "@types/d3-selection": "*" } }, "@types/d3-chord": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.2.tgz", - "integrity": "sha512-abT/iLHD3sGZwqMTX1TYCMEulr+wBd0SzyOQnjYNLp7sngdOHYtNkMRI5v3w5thoN+BWtlHVDx2Osvq6fxhZWw==" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.4.tgz", + "integrity": "sha512-p4PvN1N+7GL3Y/NI9Ug1TKwowUV6h664kmxL79ctp1HRYCk1mhP0+SXhjRsoWXCdnJfbLLLmpV99rt8dMrHrzg==" }, "@types/d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-HKuicPHJuvPgCD+np6Se9MQvS6OCbJmOjGvylzMJRlDwUXjKTTXs6Pwgk79O09Vj/ho3u1ofXnhFOaEWWPrlwA==" + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.1.tgz", + "integrity": "sha512-CSAVrHAtM9wfuLJ2tpvvwCU/F22sm7rMHNN+yh9D6O6hyAms3+O0cgMpC1pm6UEUMOntuZC8bMt74PteiDUdCg==" }, "@types/d3-contour": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.2.tgz", - "integrity": "sha512-k6/bGDoAGJZnZWaKzeB+9glgXCYGvh6YlluxzBREiVo8f/X2vpTEdgPy9DN7Z2i42PZOZ4JDhVdlTSTSkLDPlQ==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.4.tgz", + "integrity": "sha512-B0aeX8Xg3MNUglULxqDvlgY1SVXuN2xtEleYSAY0iMhl/SMVT7snzgAveejjwM3KaWuNXIoXEJ7dmXE8oPq/jA==", "requires": { "@types/d3-array": "*", "@types/geojson": "*" } }, "@types/d3-delaunay": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.1.tgz", - "integrity": "sha512-tLxQ2sfT0p6sxdG75c6f/ekqxjyYR0+LwPrsO1mbC9YDBzPJhs2HbJJRrn8Ez1DBoHRo2yx7YEATI+8V1nGMnQ==" + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.2.tgz", + "integrity": "sha512-WplUJ/OHU7eITneDqNnzK+2pgR+WDzUHG6XAUVo+oWHPQq74VcgUdw8a4ODweaZzF56OVYK+x9GxCyuq6hSu1A==" }, "@types/d3-dispatch": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.2.tgz", - 
"integrity": "sha512-rxN6sHUXEZYCKV05MEh4z4WpPSqIw+aP7n9ZN6WYAAvZoEAghEK1WeVZMZcHRBwyaKflU43PCUAJNjFxCzPDjg==" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.4.tgz", + "integrity": "sha512-NApHpGHRNxUy7e2Lfzl/cwOucmn4Xdx6FdmXzAoomo8T81LyGmlBjjko/vP0TVzawlvEFLDq8OCRLulW6DDzKw==" }, "@types/d3-drag": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.2.tgz", - "integrity": "sha512-qmODKEDvyKWVHcWWCOVcuVcOwikLVsyc4q4EBJMREsoQnR2Qoc2cZQUyFUPgO9q4S3qdSqJKBsuefv+h0Qy+tw==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.4.tgz", + "integrity": "sha512-/t53K1erTuUbP7WIX9SE0hlmytpTYRbIthlhbGkBHzCV5vPO++7yrk8OlisWPyIJO5TGowTmqCtGH2tokY5T/g==", "requires": { "@types/d3-selection": "*" } }, "@types/d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-76pBHCMTvPLt44wFOieouXcGXWOF0AJCceUvaFkxSZEu4VDUdv93JfpMa6VGNFs01FHfuP4a5Ou68eRG1KBfTw==" + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.4.tgz", + "integrity": "sha512-YxfUVJ55HxR8oq88136w09mBMPNhgH7PZjteq72onWXWOohGif/cLQnQv8V4A5lEGjXF04LhwSTpmzpY9wyVyA==" }, "@types/d3-ease": { "version": "3.0.0", @@ -13158,40 +13514,40 @@ "integrity": "sha512-aMo4eaAOijJjA6uU+GIeW018dvy9+oH5Y2VPPzjjfxevvGQ/oRDs+tfYC9b50Q4BygRR8yE2QCLsrT0WtAVseA==" }, "@types/d3-fetch": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.2.tgz", - "integrity": "sha512-gllwYWozWfbep16N9fByNBDTkJW/SyhH6SGRlXloR7WdtAaBui4plTP+gbUgiEot7vGw/ZZop1yDZlgXXSuzjA==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.4.tgz", + "integrity": "sha512-RleYajubALkGjrvatxWhlygfvB1KNF0Uzz9guRUeeA+M/2B7l8rxObYdktaX9zU1st04lMCHjZWe4vbl+msH2Q==", "requires": { "@types/d3-dsv": "*" } }, "@types/d3-force": { - "version": "3.0.4", - 
"resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.4.tgz", - "integrity": "sha512-q7xbVLrWcXvSBBEoadowIUJ7sRpS1yvgMWnzHJggFy5cUZBq2HZL5k/pBSm0GdYWS1vs5/EDwMjSKF55PDY4Aw==" + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.6.tgz", + "integrity": "sha512-G9wbOvCxkNlLrppoHLZ6oFpbm3z7ibfkXwLD8g5/4Aa7iTEV0Z7TQ0OL8UxAtvdOhCa2VZcSuqn1NQqyCEqmiw==" }, "@types/d3-format": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.1.tgz", - "integrity": "sha512-5KY70ifCCzorkLuIkDe0Z9YTf9RR2CjBX1iaJG+rgM/cPP+sO+q9YdQ9WdhQcgPj1EQiJ2/0+yUkkziTG6Lubg==" + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.2.tgz", + "integrity": "sha512-9oQWvKk2qVBo49FQq8yD/et8Lx0W5Ac2FdGSOUecqOFKqh0wkpyHqf9Qc7A06ftTR+Lz13Pi3jHIQis0aCueOA==" }, "@types/d3-geo": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.0.3.tgz", - "integrity": "sha512-bK9uZJS3vuDCNeeXQ4z3u0E7OeJZXjUgzFdSOtNtMCJCLvDtWDwfpRVWlyt3y8EvRzI0ccOu9xlMVirawolSCw==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.0.5.tgz", + "integrity": "sha512-ysEEU93Wv9p2UZBxTK3kUP7veHgyhTA0qYtI7bxK5EMXb3JxGv0D4IH54PxprAF26n+uHci24McVmzwIdLgvgQ==", "requires": { "@types/geojson": "*" } }, "@types/d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-9hjRTVoZjRFR6xo8igAJyNXQyPX6Aq++Nhb5ebrUF414dv4jr2MitM2fWiOY475wa3Za7TOS2Gh9fmqEhLTt0A==" + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.4.tgz", + "integrity": "sha512-wrvjpRFdmEu6yAqgjGy8MSud9ggxJj+I9XLuztLeSf/E0j0j6RQYtxH2J8U0Cfbgiw9ZDHyhpmaVuWhxscYaAQ==" }, "@types/d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": 
"sha512-jx5leotSeac3jr0RePOH1KdR9rISG91QIE4Q2PYTu4OymLTZfA3SrnURSLzKH48HmXVUru50b8nje4E79oQSQw==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.2.tgz", + "integrity": "sha512-zAbCj9lTqW9J9PlF4FwnvEjXZUy75NQqPm7DMHZXuxCFTpuTrdK2NMYGQekf4hlasL78fCYOLu4EE3/tXElwow==", "requires": { "@types/d3-color": "*" } @@ -13207,9 +13563,9 @@ "integrity": "sha512-D49z4DyzTKXM0sGKVqiTDTYr+DHg/uxsiWDAkNrwXYuiZVd9o9wXZIo+YsHkifOiyBkmSWlEngHCQme54/hnHw==" }, "@types/d3-quadtree": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.2.tgz", - "integrity": "sha512-QNcK8Jguvc8lU+4OfeNx+qnVy7c0VrDJ+CCVFS9srBo2GL9Y18CnIxBdTF3v38flrGy5s1YggcoAiu6s4fLQIw==" + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.3.tgz", + "integrity": "sha512-GDWaR+rGEk4ToLQSGugYnoh9AYYblsg/8kmdpa1KAJMwcdZ0v8rwgnldURxI5UrzxPlCPzF7by/Tjmv+Jn21Dg==" }, "@types/d3-random": { "version": "3.0.1", @@ -13217,9 +13573,9 @@ "integrity": "sha512-IIE6YTekGczpLYo/HehAy3JGF1ty7+usI97LqraNa8IiDur+L44d0VOjAvFQWJVdZOJHukUJw+ZdZBlgeUsHOQ==" }, "@types/d3-scale": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.3.tgz", - "integrity": "sha512-PATBiMCpvHJSMtZAMEhc2WyL+hnzarKzI6wAHYjhsonjWJYGq5BXTzQjv4l8m2jO183/4wZ90rKvSeT7o72xNQ==", + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.5.tgz", + "integrity": "sha512-w/C++3W394MHzcLKO2kdsIn5KKNTOqeQVzyPSGPLzQbkPw/jpeaGtSRlakcKevGgGsjJxGsbqS0fPrVFDbHrDA==", "requires": { "@types/d3-time": "*" } @@ -13230,27 +13586,27 @@ "integrity": "sha512-dsoJGEIShosKVRBZB0Vo3C8nqSDqVGujJU6tPznsBJxNJNwMF8utmS83nvCBKQYPpjCzaaHcrf66iTRpZosLPw==" }, "@types/d3-selection": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.5.tgz", - "integrity": 
"sha512-xCB0z3Hi8eFIqyja3vW8iV01+OHGYR2di/+e+AiOcXIOrY82lcvWW8Ke1DYE/EUVMsBl4Db9RppSBS3X1U6J0w==" + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.7.tgz", + "integrity": "sha512-qoj2O7KjfqCobmtFOth8FMvjwMVPUAAmn6xiUbLl1ld7vQCPgffvyV5BBcEFfqWdilAUm+3zciU/3P3vZrUMlg==" }, "@types/d3-shape": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.1.tgz", - "integrity": "sha512-6Uh86YFF7LGg4PQkuO2oG6EMBRLuW9cbavUW46zkIO5kuS2PfTqo2o9SkgtQzguBHbLgNnU90UNsITpsX1My+A==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.3.tgz", + "integrity": "sha512-cHMdIq+rhF5IVwAV7t61pcEXfEHsEsrbBUPkFGBwTXuxtTAkBBrnrNA8++6OWm3jwVsXoZYQM8NEekg6CPJ3zw==", "requires": { "@types/d3-path": "*" } }, "@types/d3-time": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.0.tgz", - "integrity": "sha512-sZLCdHvBUcNby1cB6Fd3ZBrABbjz3v1Vm90nysCQ6Vt7vd6e/h9Lt7SiJUoEX0l4Dzc7P5llKyhqSi1ycSf1Hg==" + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.1.tgz", + "integrity": "sha512-5j/AnefKAhCw4HpITmLDTPlf4vhi8o/dES+zbegfPb7LaGfNyqkLxBR6E+4yvTAgnJLmhe80EXFMzUs38fw4oA==" }, "@types/d3-time-format": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.0.tgz", - "integrity": "sha512-yjfBUe6DJBsDin2BMIulhSHmr5qNR5Pxs17+oW4DoVPyVIXZ+m6bs7j1UVKP08Emv6jRmYrYqxYzO63mQxy1rw==" + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.1.tgz", + "integrity": "sha512-Br6EFeu9B1Zrem7KaYbr800xCmEDyq8uE60kEU8rWhC/XpFYX6ocGMZuRJDQfFCq6SyakQxNHFqIfJbFLf4x6Q==" }, "@types/d3-timer": { "version": "3.0.0", @@ -13258,61 +13614,61 @@ "integrity": "sha512-HNB/9GHqu7Fo8AQiugyJbv6ZxYz58wef0esl4Mv828w1ZKpAshw/uFWVDUcIB9KKFeFKoxS3cHY07FFgtTRZ1g==" }, "@types/d3-transition": { - "version": "3.0.3", - 
"resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.3.tgz", - "integrity": "sha512-/S90Od8Id1wgQNvIA8iFv9jRhCiZcGhPd2qX0bKF/PS+y0W5CrXKgIiELd2CvG1mlQrWK/qlYh3VxicqG1ZvgA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.5.tgz", + "integrity": "sha512-dcfjP6prFxj3ziFOJrnt4W2P0oXNj/sGxsJXH8286sHtVZ4qWGbjuZj+RRCYx4YZ4C0izpeE8OqXVCtoWEtzYg==", "requires": { "@types/d3-selection": "*" } }, "@types/d3-zoom": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.3.tgz", - "integrity": "sha512-OWk1yYIIWcZ07+igN6BeoG6rqhnJ/pYe+R1qWFM2DtW49zsoSjgb9G5xB0ZXA8hh2jAzey1XuRmMSoXdKw8MDA==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.5.tgz", + "integrity": "sha512-mIefdTLtxuWUWTbBupCUXPAXVPmi8/Uwrq41gQpRh0rD25GMU1ku+oTELqNY2NuuiI0F3wXC5e1liBQi7YS7XQ==", "requires": { "@types/d3-interpolate": "*", "@types/d3-selection": "*" } }, "@types/debug": { - "version": "4.1.8", - "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.8.tgz", - "integrity": "sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==", + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.9.tgz", + "integrity": "sha512-8Hz50m2eoS56ldRlepxSBa6PWEVCtzUo/92HgLc2qTMnotJNIm7xP+UZhyWoYsyOdd5dxZ+NZLb24rsKyFs2ow==", "requires": { "@types/ms": "*" } }, "@types/estree": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz", - "integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==" + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.2.tgz", + "integrity": "sha512-VeiPZ9MMwXjO32/Xu7+OwflfmeoRwkE/qzndw42gGtgJwZopBnzy2gD//NN1+go1mADzkDcqf/KnFRSjTJ8xJA==" }, "@types/geojson": { - "version": "7946.0.10", - "resolved": 
"https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.10.tgz", - "integrity": "sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==" + "version": "7946.0.11", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.11.tgz", + "integrity": "sha512-L7A0AINMXQpVwxHJ4jxD6/XjZ4NDufaRlUJHjNIFKYUFBH1SvOW+neaqb0VTRSLW5suSrSu19ObFEFnfNcr+qg==" }, "@types/hast": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.5.tgz", - "integrity": "sha512-SvQi0L/lNpThgPoleH53cdjB3y9zpLlVjRbqB3rH8hx1jiRSBGAhyjV3H+URFjNVRqt2EdYNrbZE5IsGlNfpRg==", + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.6.tgz", + "integrity": "sha512-47rJE80oqPmFdVDCD7IheXBrVdwuBgsYwoczFvKmwfo2Mzsnt+V9OONsYauFmICb6lQPpCuXYJWejBNs4pDJRg==", "requires": { "@types/unist": "^2" } }, "@types/hoist-non-react-statics": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.1.tgz", - "integrity": "sha512-iMIqiko6ooLrTh1joXodJK5X9xeEALT1kM5G3ZLhD3hszxBdIEd5C75U834D9mLcINgD4OyZf5uQXjkuYydWvA==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-YIQtIg4PKr7ZyqNPZObpxfHsHEmuB8dXCxd6qVcGuQVDK2bpsF7bYNnBJ4Nn7giuACZg+WewExgrtAJ3XnA4Xw==", "requires": { "@types/react": "*", "hoist-non-react-statics": "^3.3.0" } }, "@types/http-cache-semantics": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", - "integrity": "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.2.tgz", + "integrity": 
"sha512-FD+nQWA2zJjh4L9+pFXqWOi0Hs1ryBCfI+985NjluQ1p8EYtoLvjLOKidXBtZ4/IcxDX4o8/E8qDS3540tNliw==", "dev": true }, "@types/jest": { @@ -13326,9 +13682,9 @@ } }, "@types/katex": { - "version": "0.16.2", - "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.2.tgz", - "integrity": "sha512-dHsSjSlU/EWEEbeNADr3FtZZOAXPkFPUO457QCnoNqcZQXNqNEu/svQd0Nritvd3wNff4vvC/f4e6xgX3Llt8A==" + "version": "0.16.3", + "resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.3.tgz", + "integrity": "sha512-CeVMX9EhVUW8MWnei05eIRks4D5Wscw/W9Byz1s3PA+yJvcdvq9SaDjiUKvRvEgjpdTyJMjQA43ae4KTwsvOPg==" }, "@types/keyv": { "version": "3.1.4", @@ -13340,9 +13696,9 @@ } }, "@types/lodash": { - "version": "4.14.197", - "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.197.tgz", - "integrity": "sha512-BMVOiWs0uNxHVlHBgzTIqJYmj+PgCo4euloGF+5m4okL3rEYzM2EEv78mw8zWSMM57dM7kVIgJ2QDvwHSoCI5g==", + "version": "4.14.199", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.199.tgz", + "integrity": "sha512-Vrjz5N5Ia4SEzWWgIVwnHNEnb1UE1XMkvY5DGXrAeOGE9imk0hgTHh5GyDjLDJi9OTCn9oo9dXH1uToK1VRfrg==", "dev": true }, "@types/mathjax": { @@ -13364,9 +13720,9 @@ "integrity": "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==" }, "@types/node": { - "version": "16.18.40", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.40.tgz", - "integrity": "sha512-+yno3ItTEwGxXiS/75Q/aHaa5srkpnJaH+kdkTVJ3DtJEwv92itpKbxU+FjPoh2m/5G9zmUQfrL4A4C13c+iGA==", + "version": "16.18.54", + "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.54.tgz", + "integrity": "sha512-oTmGy68gxZZ21FhTJVVvZBYpQHEBZxHKTsGshobMqm9qWpbqdZsA5jvsuPZcHu0KwpmLrOHWPdEfg7XDpNT9UA==", "devOptional": true }, "@types/parse-json": { @@ -13375,14 +13731,14 @@ "integrity": "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" }, "@types/prop-types": { - "version": "15.7.5", - "resolved": 
"https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" + "version": "15.7.7", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.7.tgz", + "integrity": "sha512-FbtmBWCcSa2J4zL781Zf1p5YUBXQomPEcep9QZCfRfQgTxz3pJWiDFLebohZ9fFntX5ibzOkSsrJ0TEew8cAog==" }, "@types/react": { - "version": "18.2.20", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.20.tgz", - "integrity": "sha512-WKNtmsLWJM/3D5mG4U84cysVY31ivmyw85dE84fOCk5Hx78wezB/XEjVPWl2JTZ5FkEeaTJf+VgUAUn3PE7Isw==", + "version": "18.2.22", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.22.tgz", + "integrity": "sha512-60fLTOLqzarLED2O3UQImc/lsNRgG0jE/a1mPW9KjMemY0LMITWEsbS4VvZ4p6rorEHd5YKxxmMKSDK505GHpA==", "requires": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -13398,14 +13754,6 @@ "@types/react": "*" } }, - "@types/react-is": { - "version": "18.2.1", - "resolved": "https://registry.npmjs.org/@types/react-is/-/react-is-18.2.1.tgz", - "integrity": "sha512-wyUkmaaSZEzFZivD8F2ftSyAfk6L+DfFliVj/mYdOXbVjRcS87fQJLTnhk6dRZPuJjI+9g6RZJO4PNCngUrmyw==", - "requires": { - "@types/react": "*" - } - }, "@types/react-transition-group": { "version": "4.4.6", "resolved": "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.6.tgz", @@ -13424,9 +13772,9 @@ } }, "@types/scheduler": { - "version": "0.16.3", - "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz", - "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==" + "version": "0.16.4", + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.4.tgz", + "integrity": "sha512-2L9ifAGl7wmXwP4v3pN4p2FLhD0O1qsJpvKmNin5VA8+UvNVb447UDaAEV6UdrkA+m/Xs58U1RFps44x6TFsVQ==" }, "@types/testing-library__jest-dom": { "version": "5.14.9", @@ -13438,23 +13786,23 
@@ } }, "@types/unist": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.7.tgz", - "integrity": "sha512-cputDpIbFgLUaGQn6Vqg3/YsJwxUwHLO13v3i5ouxT4lat0khip9AEWxtERujXV9wxIB1EyF97BSJFt6vpdI8g==" + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.8.tgz", + "integrity": "sha512-d0XxK3YTObnWVp6rZuev3c49+j4Lo8g4L1ZRm9z5L0xpoZycUPshHgczK5gsUMaZOstjVYYi09p5gYvUtfChYw==" }, "@types/uuid": { - "version": "9.0.2", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz", - "integrity": "sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==", + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.4.tgz", + "integrity": "sha512-zAuJWQflfx6dYJM62vna+Sn5aeSWhh3OB+wfUEACNcqUSc0AGc5JKl+ycL1vrH7frGTXhJchYjE1Hak8L819dA==", "dev": true }, "@vitejs/plugin-react-swc": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.3.2.tgz", - "integrity": "sha512-VJFWY5sfoZerQRvJrh518h3AcQt6f/yTuWn4/TRB+dqmYU0NX1qz7qM5Wfd+gOQqUzQW4gxKqKN3KpE/P3+zrA==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.4.0.tgz", + "integrity": "sha512-m7UaA4Uvz82N/0EOVpZL4XsFIakRqrFKeSNxa1FBLSXGvWrWRBwmZb4qxk+ZIVAZcW3c3dn5YosomDgx62XWcQ==", "dev": true, "requires": { - "@swc/core": "^1.3.61" + "@swc/core": "^1.3.85" } }, "abab": { @@ -13468,9 +13816,9 @@ "integrity": "sha512-jbQfFaw+57OBwPt7qSNHuW+RA8smmRwkWRS1Ozh6K/QxUspBgBV/LpdSzlY7vee8TomS6j3D33B9rIeH1qMwsA==" }, "ace-builds": { - "version": "1.24.1", - "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.24.1.tgz", - "integrity": "sha512-TLcxMxiTRX5Eq9bBVSd/bTJlanCBULiv/IULLohJDDaCAfcpZKJBVSd4OWfN/j2c2jCLc+jhpNWGELiJZw3wPw==" + "version": "1.28.0", + "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.28.0.tgz", + "integrity": 
"sha512-wkJp+Wz8MRHtCVdt65L/jPFLAQ0iqJZ2EeD2XWOvKGbIi4mZNwHlpHRLRB8ZnQ07VoiB0TLFWwIjjm2FL9gUcQ==" }, "acorn": { "version": "8.10.0", @@ -13604,14 +13952,14 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "autoprefixer": { - "version": "10.4.15", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.15.tgz", - "integrity": "sha512-KCuPB8ZCIqFdA4HwKXsvz7j6gvSDNhDP7WnUjBleRkKjPdvCmHFuQ77ocavI8FT6NdvlBnE2UFr2H4Mycn8Vew==", + "version": "10.4.16", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.16.tgz", + "integrity": "sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==", "dev": true, "requires": { "browserslist": "^4.21.10", - "caniuse-lite": "^1.0.30001520", - "fraction.js": "^4.2.0", + "caniuse-lite": "^1.0.30001538", + "fraction.js": "^4.3.6", "normalize-range": "^0.1.2", "picocolors": "^1.0.0", "postcss-value-parser": "^4.2.0" @@ -13624,9 +13972,9 @@ "dev": true }, "axios": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz", - "integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.5.0.tgz", + "integrity": "sha512-D4DdjDo5CY50Qms0qGQTTw6Q44jl7zRwY7bthds06pUGfChBCTcQs+N743eFWGEd6pRTMd6A+I87aWyFV5wiZQ==", "requires": { "follow-redirects": "^1.15.0", "form-data": "^4.0.0", @@ -13803,14 +14151,14 @@ } }, "browserslist": { - "version": "4.21.10", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz", - "integrity": "sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ==", + "version": "4.21.11", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.11.tgz", + "integrity": 
"sha512-xn1UXOKUz7DjdGlg9RrUr0GGiWzI97UQJnugHtH0OLDfJB7jMgoIkYvRIEO1l9EeEERVqeqLYOcFBW9ldjypbQ==", "requires": { - "caniuse-lite": "^1.0.30001517", - "electron-to-chromium": "^1.4.477", + "caniuse-lite": "^1.0.30001538", + "electron-to-chromium": "^1.4.526", "node-releases": "^2.0.13", - "update-browserslist-db": "^1.0.11" + "update-browserslist-db": "^1.0.13" } }, "buffer": { @@ -13880,9 +14228,9 @@ "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" }, "caniuse-lite": { - "version": "1.0.30001521", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001521.tgz", - "integrity": "sha512-fnx1grfpEOvDGH+V17eccmNjucGUnCbP6KL+l5KqBIerp26WK/+RQ7CIDE37KGJjaPyqWXXlFUyKiWmvdNNKmQ==" + "version": "1.0.30001539", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001539.tgz", + "integrity": "sha512-hfS5tE8bnNiNvEOEkm8HElUHroYwlqMMENEzELymy77+tJ6m+gA2krtHl5hxJaj71OlpC2cHZbdSMX1/YEqEkA==" }, "ccount": { "version": "2.0.1", @@ -13956,9 +14304,9 @@ } }, "cli-spinners": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.0.tgz", - "integrity": "sha512-4/aL9X3Wh0yiMQlE+eeRhWP6vclO3QRtw1JHKIT0FFUs5FjpFmESqtMvYZ0+lbzBw900b95mS0hohy+qn2VK/g==" + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.1.tgz", + "integrity": "sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ==" }, "client-only": { "version": "0.0.1", @@ -14187,9 +14535,9 @@ } }, "daisyui": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-3.5.1.tgz", - "integrity": "sha512-7GG+9QXnr2qQMCqnyFU8TxpaOYJigXiEtmzoivmiiZZHvxqIwYdaMAkgivqTVxEgy3Hot3m1suzZjmt1zUrvmA==", + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/daisyui/-/daisyui-3.8.0.tgz", + "integrity": 
"sha512-VsWD//XlHkOBFSiRNTOZTxTJ/W8xI65erowErfbDWrPTkMYqf0ee/FTaqn4rquZoCcumENdFegAL8eELMnztxQ==", "dev": true, "requires": { "colord": "^2.9", @@ -14299,12 +14647,24 @@ "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", "dev": true }, - "define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "define-data-property": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.0.tgz", + "integrity": "sha512-UzGwzcjyv3OtAvolTj1GoyNYzfFR+iqbGjcnBEENZVCpM4/Ng1yhGNvS3lR/xDS74Tb2wGG9WzNSNIOS9UVb2g==", "dev": true, "requires": { + "get-intrinsic": "^1.2.1", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.0" + } + }, + "define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "requires": { + "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", "object-keys": "^1.1.1" } @@ -14378,10 +14738,19 @@ "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.0.5.tgz", "integrity": "sha512-F9e6wPGtY+8KNMRAVfxeCOHU0/NPWMSENNq4pQctuXRqqdEPW7q3CrLbR5Nse044WwacyjHGOMlvNsBe1y6z9A==" }, + "dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "requires": { + "no-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, "electron-to-chromium": { - "version": "1.4.495", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.495.tgz", - "integrity": 
"sha512-mwknuemBZnoOCths4GtpU/SDuVMp3uQHKa2UNJT9/aVD6WVRjGpXOxRGX7lm6ILIenTdGXPSTCTDaWos5tEU8Q==" + "version": "1.4.529", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.529.tgz", + "integrity": "sha512-6uyPyXTo8lkv8SWAmjKFbG42U073TXlzD4R8rW3EzuznhFS2olCIAfjjQtV2dV2ar/vRF55KUd3zQYnCB0dd3A==" }, "emoji-regex": { "version": "8.0.0", @@ -14657,9 +15026,9 @@ } }, "follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==" + "version": "1.15.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", + "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==" }, "for-each": { "version": "0.3.3", @@ -14694,9 +15063,9 @@ } }, "fraction.js": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", - "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.6.tgz", + "integrity": "sha512-n2aZ9tNfYDwaHhvFTkhFErqOMIb8uyzSQ+vGJBjZyanAKZVbGUQ1sngfk9FdkBw7G26O7AgNjLcecLffD1c7eg==", "dev": true }, "fs-extra": { @@ -15376,9 +15745,9 @@ } }, "jiti": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.19.1.tgz", - "integrity": "sha512-oVhqoRDaBXf7sjkll95LHVS6Myyyb1zaunVwk4Z0+WPSW4gjS0pl01zYKHScTuyEhQsFxV5L4DR5r+YqSyqyyg==" + "version": "1.20.0", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.20.0.tgz", + "integrity": "sha512-3TV69ZbrvV6U5DfQimop50jE9Dl6J8O1ja1dvBbMba/sZ3YBEQqJ2VZRoQPVnhlzjNtU1vaXRZVrVjU4qtm8yA==" }, "js-tokens": { "version": "4.0.0", @@ -15567,6 +15936,14 @@ "js-tokens": "^3.0.0 || ^4.0.0" } }, + "lower-case": { + "version": 
"2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "requires": { + "tslib": "^2.0.3" + } + }, "lowercase-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", @@ -16181,6 +16558,11 @@ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-2.1.6.tgz", "integrity": "sha512-+hEnITedc8LAtIP9u3HJDFIdcLV2vXP33sqLLIzkv1Db1zO/1OxbvYf0Y1OC/S/Qo5dxHXepofhmxL02PsKe+A==" }, + "moment": { + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==" + }, "mri": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", @@ -16206,6 +16588,15 @@ "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==" }, + "no-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", + "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", + "requires": { + "lower-case": "^2.0.2", + "tslib": "^2.0.3" + } + }, "node-domexception": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", @@ -16460,10 +16851,26 @@ "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==" }, + "playwright": { + "version": "1.38.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.38.1.tgz", + "integrity": "sha512-oRMSJmZrOu1FP5iu3UrCx8JEFRIMxLDM0c/3o4bpzU5Tz97BypefWf7TuTNPWeCe279TPal5RtPPZ+9lW/Qkow==", + "dev": true, + 
"requires": { + "fsevents": "2.3.2", + "playwright-core": "1.38.1" + } + }, + "playwright-core": { + "version": "1.38.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.38.1.tgz", + "integrity": "sha512-tQqNFUKa3OfMf4b2jQ7aGLB8o9bS3bOY0yMEtldtC2+spf8QXG9zvXLTXUeRsoNuxEYMgLYR+NXfAa1rjKRcrg==", + "dev": true + }, "postcss": { - "version": "8.4.28", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.28.tgz", - "integrity": "sha512-Z7V5j0cq8oEKyejIKfpD8b4eBy9cwW2JWPk0+fB1HOAMsfHbnAXLLS+PfVWlzMSLQaWttKDt607I0XHmpE67Vw==", + "version": "8.4.30", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.30.tgz", + "integrity": "sha512-7ZEao1g4kd68l97aWG/etQKPKq07us0ieSZ2TnFDk11i0ZfDW2AwKHYU8qv4MZKqN2fdBfg+7q0ES06UA73C1g==", "requires": { "nanoid": "^3.3.6", "picocolors": "^1.0.0", @@ -16498,9 +16905,9 @@ }, "dependencies": { "yaml": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.1.tgz", - "integrity": "sha512-2eHWfjaoXgTBC2jNM1LRef62VQa0umtvRiDSk6HSzW7RvS5YtkabJrwYLLEKWBc8a5U2PTSCs+dJjUTJdlHsWQ==" + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.2.tgz", + "integrity": "sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg==" } } }, @@ -16615,9 +17022,9 @@ } }, "property-information": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.2.0.tgz", - "integrity": "sha512-kma4U7AFCTwpqq5twzC1YVIDXSqg6qQK6JN0smOw8fgRy1OkMi0CYSzFmsy6dnqSenamAtj0CyXMUJ1Mf6oROg==" + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.3.0.tgz", + "integrity": "sha512-gVNZ74nqhRMiIUYWGQdosYetaKc83x8oT41a0LlV3AAFCAZwCpg4vmGkq8t34+cUhp3cnM4XDiU/7xlgK7HGrg==" }, "proxy-from-env": { "version": "1.1.0", @@ -16714,9 +17121,9 @@ } }, "react-icons": { - "version": "4.10.1", - "resolved": 
"https://registry.npmjs.org/react-icons/-/react-icons-4.10.1.tgz", - "integrity": "sha512-/ngzDP/77tlCfqthiiGNZeYFACw85fUjZtLbedmJ5DTlNDIwETxhwBzdOJ21zj4iJdvc0J3y7yOsX3PpxAJzrw==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.11.0.tgz", + "integrity": "sha512-V+4khzYcE5EBk/BvcuYRq6V/osf11ODUM2J8hg2FDSswRrGvqiYUYPRy4OdrWaQOBj4NcpJfmHZLNaD+VH0TyA==", "requires": {} }, "react-is": { @@ -16776,20 +17183,20 @@ } }, "react-router": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.15.0.tgz", - "integrity": "sha512-NIytlzvzLwJkCQj2HLefmeakxxWHWAP+02EGqWEZy+DgfHHKQMUoBBjUQLOtFInBMhWtb3hiUy6MfFgwLjXhqg==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.16.0.tgz", + "integrity": "sha512-VT4Mmc4jj5YyjpOi5jOf0I+TYzGpvzERy4ckNSvSh2RArv8LLoCxlsZ2D+tc7zgjxcY34oTz2hZaeX5RVprKqA==", "requires": { - "@remix-run/router": "1.8.0" + "@remix-run/router": "1.9.0" } }, "react-router-dom": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.15.0.tgz", - "integrity": "sha512-aR42t0fs7brintwBGAv2+mGlCtgtFQeOzK0BM1/OiqEzRejOZtpMZepvgkscpMUnKb8YO84G7s3LsHnnDNonbQ==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.16.0.tgz", + "integrity": "sha512-aTfBLv3mk/gaKLxgRDUPbPw+s4Y/O+ma3rEN1u8EgEpLpPe6gNjIsWt9rxushMHHMb7mSwxRGdGlGdvmFsyPIg==", "requires": { - "@remix-run/router": "1.8.0", - "react-router": "6.15.0" + "@remix-run/router": "1.9.0", + "react-router": "6.16.0" } }, "react-style-singleton": { @@ -16831,9 +17238,9 @@ } }, "react-tooltip": { - "version": "5.21.1", - "resolved": "https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.21.1.tgz", - "integrity": "sha512-wJqF/yzK1wuJuy5/zAkVErFA609fVv1ZukhGjw44PcMvg9wL0jomnpQyz3qH1H7TWjz/wqO/OMc3ipQNjZ8zYg==", + "version": "5.21.4", + "resolved": 
"https://registry.npmjs.org/react-tooltip/-/react-tooltip-5.21.4.tgz", + "integrity": "sha512-LZsllEbiu63zNwuCalq3gIFcBu2Xf0I0fMg7uuF7/5ROo5//uHe8Sum7v9L1Rtp6IozcoU9YAjkNUZdrxutsNg==", "requires": { "@floating-ui/dom": "^1.0.0", "classnames": "^2.3.0" @@ -16850,17 +17257,23 @@ "prop-types": "^15.6.2" } }, + "react18-json-view": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/react18-json-view/-/react18-json-view-0.2.5.tgz", + "integrity": "sha512-BiCWyRUCVbnaK4kfNay8crOXZnWsZ6XsnY3fwOf5C+ZaY9w9FSTawo2p+h2UG/KcDP8meZuGlkP95klfFG9GfQ==", + "requires": {} + }, "reactflow": { - "version": "11.8.2", - "resolved": "https://registry.npmjs.org/reactflow/-/reactflow-11.8.2.tgz", - "integrity": "sha512-qqjg4x1yFtyzAu2ldyadRhoTzQjFRnuS+iPUrxaaPaBqGfuwkjm3KnFUrbbgincpOaRVMlIuTuCu5sIRQPcYBQ==", + "version": "11.9.1", + "resolved": "https://registry.npmjs.org/reactflow/-/reactflow-11.9.1.tgz", + "integrity": "sha512-lKWWWFne/o8/SShIeXXP4J/15384L0KDohH0It83gNKFgF6Yo7/EJE9p6k3CnYnOfyYETFRYb1KNOY04cSmtVA==", "requires": { - "@reactflow/background": "11.2.7", - "@reactflow/controls": "11.1.18", - "@reactflow/core": "11.8.2", - "@reactflow/minimap": "11.6.2", - "@reactflow/node-resizer": "2.1.4", - "@reactflow/node-toolbar": "1.2.6" + "@reactflow/background": "11.3.1", + "@reactflow/controls": "11.2.1", + "@reactflow/core": "11.9.1", + "@reactflow/minimap": "11.7.1", + "@reactflow/node-resizer": "2.2.1", + "@reactflow/node-toolbar": "1.3.1" } }, "read-cache": { @@ -16931,14 +17344,14 @@ "integrity": "sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==" }, "regexp.prototype.flags": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz", - "integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==", + "version": "1.5.1", + "resolved": 
"https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", + "integrity": "sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", "dev": true, "requires": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" + "set-function-name": "^2.0.0" } }, "rehype-mathjax": { @@ -17010,9 +17423,9 @@ "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "resolve": { - "version": "1.22.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.4.tgz", - "integrity": "sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==", + "version": "1.22.6", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.6.tgz", + "integrity": "sha512-njhxM7mV12JfufShqGy3Rz8j11RPdLy4xi15UurGJeoHLfJpVXKdh3ueuOqbYUcDZnffr6X739JBo5LzyahEsw==", "requires": { "is-core-module": "^2.13.0", "path-parse": "^1.0.7", @@ -17054,9 +17467,9 @@ "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" }, "rollup": { - "version": "3.28.0", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.28.0.tgz", - "integrity": "sha512-d7zhvo1OUY2SXSM6pfNjgD5+d0Nz87CUp4mt8l/GgVP3oBsPwzNvSzyu1me6BSG9JIgWNTVcafIXBIyM8yQ3yw==", + "version": "3.29.3", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.29.3.tgz", + "integrity": "sha512-T7du6Hum8jOkSWetjRgbwpM6Sy0nECYrYRSmZjayFcOddtKJWU4d17AC3HNUk7HRuqy4p+G7aEZclSHytqUmEg==", "requires": { "fsevents": "~2.3.2" } @@ -17144,6 +17557,17 @@ "semver": "^7.3.5" } }, + "set-function-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", + "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "dev": true, + "requires": { + "define-data-property": "^1.0.1", + 
"functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.0" + } + }, "shadcn-ui": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/shadcn-ui/-/shadcn-ui-0.2.3.tgz", @@ -17184,13 +17608,13 @@ "integrity": "sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==" }, "cosmiconfig": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.2.0.tgz", - "integrity": "sha512-3rTMnFJA1tCOPwRxtgF4wd7Ab2qvDbL8jX+3smjIbS4HlZBagTlpERbdN7iAbWlrfxE3M8c27kTwTawQ7st+OQ==", + "version": "8.3.6", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", + "integrity": "sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==", "requires": { - "import-fresh": "^3.2.1", + "import-fresh": "^3.3.0", "js-yaml": "^4.1.0", - "parse-json": "^5.0.0", + "parse-json": "^5.2.0", "path-type": "^4.0.0" } }, @@ -17362,6 +17786,15 @@ "is-fullwidth-code-point": "^3.0.0" } }, + "snake-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", + "integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==", + "requires": { + "dot-case": "^3.0.4", + "tslib": "^2.0.3" + } + }, "sort-keys": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", @@ -17653,9 +18086,9 @@ } }, "tailwindcss-animate": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/tailwindcss-animate/-/tailwindcss-animate-1.0.6.tgz", - "integrity": "sha512-4WigSGMvbl3gCCact62ZvOngA+PRqhAn7si3TQ3/ZuPuQZcIEtVap+ENSXbzWhpojKB8CpvnIsrwBu8/RnHtuw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/tailwindcss-animate/-/tailwindcss-animate-1.0.7.tgz", + "integrity": "sha512-bl6mpH3T7I3UFxuvDEXLxy/VuFxBk5bbzplh7tXI68mwMokNYd1t9qPBHlnyTwfa4JGC4zP516I1hYYtQ/vspA==", "requires": {} }, "thenify": { @@ -17780,15 +18213,15 
@@ } }, "tslib": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.1.tgz", - "integrity": "sha512-t0hLfiEKfMUoqhG+U1oid7Pva4bbDPHYfJNiB7BiIjRkj1pyC++4N3huJfqY6aRH6VTB0rvtzQwjM4K6qpfOig==" + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", + "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" }, "typescript": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", - "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", - "dev": true + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", + "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "devOptional": true }, "unified": { "version": "10.1.2", @@ -17883,9 +18316,9 @@ "integrity": "sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==" }, "update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.0.13", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", + "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==", "requires": { "escalade": "^3.1.1", "picocolors": "^1.0.0" @@ -17937,9 +18370,9 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "uuid": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", - "integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==" 
+ "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==" }, "uvu": { "version": "0.5.6", @@ -18154,13 +18587,13 @@ } }, "vite-plugin-svgr": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-3.2.0.tgz", - "integrity": "sha512-Uvq6niTvhqJU6ga78qLKBFJSDvxWhOnyfQSoKpDPMAGxJPo5S3+9hyjExE5YDj6Lpa4uaLkGc1cBgxXov+LjSw==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/vite-plugin-svgr/-/vite-plugin-svgr-3.3.0.tgz", + "integrity": "sha512-vWZMCcGNdPqgziYFKQ3Y95XP0d0YGp28+MM3Dp9cTa/px5CKcHHrIoPl2Jw81rgVm6/ZUNONzjXbZQZ7Kw66og==", "requires": { - "@rollup/pluginutils": "^5.0.2", - "@svgr/core": "^7.0.0", - "@svgr/plugin-jsx": "^7.0.0" + "@rollup/pluginutils": "^5.0.4", + "@svgr/core": "^8.1.0", + "@svgr/plugin-jsx": "^8.1.0" } }, "w3c-xmlserializer": { @@ -18279,9 +18712,9 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "ws": { - "version": "8.13.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.13.0.tgz", - "integrity": "sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==", + "version": "8.14.2", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.14.2.tgz", + "integrity": "sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==", "requires": {} }, "xml-name-validator": { @@ -18316,9 +18749,9 @@ "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" }, "zod": { - "version": "3.22.1", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.1.tgz", - "integrity": "sha512-+qUhAMl414+Elh+fRNtpU+byrwjDFOS1N7NioLY+tSlcADTx4TkCUua/hxJvxwDXcV4397/nZ420jy4n4+3WUg==" + "version": "3.22.2", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.2.tgz", + 
"integrity": "sha512-wvWkphh5WQsJbVk1tbx1l1Ly4yg+XecD+Mq280uBGt9wa5BKSWf4Mhp6GmrkPixhMxmabYY7RbzlwVP32pbGCg==" }, "zustand": { "version": "4.4.1", diff --git a/src/frontend/package.json b/src/frontend/package.json index 65b5acb6c..1c64b6333 100644 --- a/src/frontend/package.json +++ b/src/frontend/package.json @@ -3,11 +3,11 @@ "version": "0.1.2", "private": true, "dependencies": { - "@emotion/react": "^11.10.5", - "@emotion/styled": "^11.10.5", - "@headlessui/react": "^1.7.10", - "@heroicons/react": "^2.0.15", - "@mui/material": "^5.11.9", + "@emotion/react": "^11.11.1", + "@emotion/styled": "^11.11.0", + "@headlessui/react": "^1.7.17", + "@heroicons/react": "^2.0.18", + "@mui/material": "^5.14.7", "@radix-ui/react-accordion": "^1.1.2", "@radix-ui/react-checkbox": "^1.0.4", "@radix-ui/react-dialog": "^1.0.4", @@ -24,44 +24,46 @@ "@radix-ui/react-switch": "^1.0.3", "@radix-ui/react-tabs": "^1.0.4", "@radix-ui/react-tooltip": "^1.0.6", - "@tabler/icons-react": "^2.18.0", - "@tailwindcss/forms": "^0.5.3", + "@tabler/icons-react": "^2.32.0", + "@tailwindcss/forms": "^0.5.6", "@tailwindcss/line-clamp": "^0.4.4", "@types/axios": "^0.14.0", "accordion": "^3.0.2", - "ace-builds": "^1.16.0", + "ace-builds": "^1.24.1", "add": "^2.0.6", "ansi-to-html": "^0.7.2", - "axios": "^1.3.2", + "axios": "^1.5.0", "base64-js": "^1.5.1", - "class-variance-authority": "^0.6.0", + "class-variance-authority": "^0.6.1", "clsx": "^1.2.1", - "dompurify": "^3.0.4", - "esbuild": "^0.17.18", + "dompurify": "^3.0.5", + "esbuild": "^0.17.19", "lodash": "^4.17.21", "lucide-react": "^0.233.0", + "moment": "^2.29.4", "react": "^18.2.0", "react-ace": "^10.1.0", "react-cookie": "^4.1.1", "react-dom": "^18.2.0", - "react-error-boundary": "^4.0.2", - "react-icons": "^4.8.0", + "react-error-boundary": "^4.0.11", + "react-icons": "^4.10.1", "react-laag": "^2.0.5", "react-markdown": "^8.0.7", - "react-router-dom": "^6.8.1", + "react-router-dom": "^6.15.0", "react-syntax-highlighter": "^15.5.0", - 
"react-tabs": "^6.0.0", - "react-tooltip": "^5.13.1", - "reactflow": "^11.5.5", - "rehype-mathjax": "^4.0.2", + "react-tabs": "^6.0.2", + "react-tooltip": "^5.21.1", + "react18-json-view": "^0.2.3", + "reactflow": "^11.8.3", + "rehype-mathjax": "^4.0.3", "remark-gfm": "^3.0.1", "remark-math": "^5.1.1", - "shadcn-ui": "^0.2.2", + "shadcn-ui": "^0.2.3", "short-unique-id": "^4.4.4", "switch": "^0.0.0", "table": "^6.8.1", - "tailwind-merge": "^1.13.0", - "tailwindcss-animate": "^1.0.5", + "tailwind-merge": "^1.14.0", + "tailwindcss-animate": "^1.0.7", "uuid": "^9.0.0", "vite-plugin-svgr": "^3.2.0", "web-vitals": "^2.1.4" @@ -71,7 +73,7 @@ "start": "vite", "build": "vite build", "serve": "vite preview", - "format": "npx prettier --write \"**/*.{js,jsx,ts,tsx,json,md}\"", + "format": "npx prettier --write \"./**/*.{js,jsx,ts,tsx,json,md}\"", "type-check": "tsc --noEmit --pretty --project tsconfig.json && vite" }, "eslintConfig": { @@ -94,27 +96,28 @@ }, "proxy": "http://127.0.0.1:7860", "devDependencies": { + "@playwright/test": "^1.38.0", "@swc/cli": "^0.1.62", - "@swc/core": "^1.3.62", + "@swc/core": "^1.3.80", "@tailwindcss/typography": "^0.5.9", - "@testing-library/jest-dom": "^5.16.5", + "@testing-library/jest-dom": "^5.17.0", "@testing-library/react": "^13.4.0", "@testing-library/user-event": "^13.5.0", "@types/jest": "^27.5.2", - "@types/lodash": "^4.14.194", - "@types/node": "^16.18.12", - "@types/react": "^18.0.28", - "@types/react-dom": "^18.0.11", - "@types/uuid": "^9.0.1", - "@vitejs/plugin-react-swc": "^3.0.0", - "autoprefixer": "^10.4.14", - "daisyui": "^3.1.1", - "postcss": "^8.4.23", + "@types/lodash": "^4.14.197", + "@types/node": "^16.18.46", + "@types/react": "^18.2.21", + "@types/react-dom": "^18.2.7", + "@types/uuid": "^9.0.2", + "@vitejs/plugin-react-swc": "^3.3.2", + "autoprefixer": "^10.4.15", + "daisyui": "^3.6.3", + "postcss": "^8.4.29", "prettier": "^2.8.8", - "prettier-plugin-organize-imports": "^3.2.2", + "prettier-plugin-organize-imports": 
"^3.2.3", "prettier-plugin-tailwindcss": "^0.3.0", - "tailwindcss": "^3.3.2", - "typescript": "^5.0.2", - "vite": "^4.3.9" + "tailwindcss": "^3.3.3", + "typescript": "^5.2.2", + "vite": "^4.4.9" } } diff --git a/src/frontend/playwright.config.ts b/src/frontend/playwright.config.ts new file mode 100644 index 000000000..6af6cb8d7 --- /dev/null +++ b/src/frontend/playwright.config.ts @@ -0,0 +1,77 @@ +import { defineConfig, devices } from "@playwright/test"; + +/** + * Read environment variables from file. + * https://github.com/motdotla/dotenv + */ +// require('dotenv').config(); + +/** + * See https://playwright.dev/docs/test-configuration. + */ +export default defineConfig({ + testDir: "./tests", + /* Run tests in files in parallel */ + fullyParallel: true, + /* Fail the build on CI if you accidentally left test.only in the source code. */ + forbidOnly: !!process.env.CI, + /* Retry on CI only */ + retries: process.env.CI ? 2 : 0, + /* Opt out of parallel tests on CI. */ + workers: process.env.CI ? 1 : undefined, + /* Reporter to use. See https://playwright.dev/docs/test-reporters */ + reporter: "html", + /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ + use: { + /* Base URL to use in actions like `await page.goto('/')`. */ + // baseURL: 'http://127.0.0.1:3000', + + /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ + trace: "on-first-retry", + }, + + /* Configure projects for major browsers */ + projects: [ + { + name: "chromium", + use: { ...devices["Desktop Chrome"] }, + }, + + { + name: "firefox", + use: { ...devices["Desktop Firefox"] }, + }, + + { + name: "webkit", + use: { ...devices["Desktop Safari"] }, + }, + + /* Test against mobile viewports. */ + // { + // name: 'Mobile Chrome', + // use: { ...devices['Pixel 5'] }, + // }, + // { + // name: 'Mobile Safari', + // use: { ...devices['iPhone 12'] }, + // }, + + /* Test against branded browsers. 
*/ + // { + // name: 'Microsoft Edge', + // use: { ...devices['Desktop Edge'], channel: 'msedge' }, + // }, + // { + // name: 'Google Chrome', + // use: { ...devices['Desktop Chrome'], channel: 'chrome' }, + // }, + ], + + /* Run your local dev server before starting the tests */ + // webServer: { + // command: 'npm run start', + // url: 'http://127.0.0.1:3000', + // reuseExistingServer: !process.env.CI, + // }, +}); diff --git a/src/frontend/public/favicon.ico b/src/frontend/public/favicon.ico index 0612ba9b3..3bc197aa8 100644 Binary files a/src/frontend/public/favicon.ico and b/src/frontend/public/favicon.ico differ diff --git a/src/frontend/set_proxy.sh b/src/frontend/set_proxy.sh old mode 100755 new mode 100644 diff --git a/src/frontend/src/App.css b/src/frontend/src/App.css index be7173d7f..a7a8bad51 100644 --- a/src/frontend/src/App.css +++ b/src/frontend/src/App.css @@ -3,45 +3,85 @@ @tailwind utilities; .App { - text-align: center; + text-align: center; +} + +.react-flow__node { + width: auto; + height: auto; + border-radius: auto; + min-width: inherit; } .App-logo { - height: 40vmin; - pointer-events: none; + height: 40vmin; + pointer-events: none; } @media (prefers-reduced-motion: no-preference) { - .App-logo { - animation: App-logo-spin infinite 20s linear; - } + .App-logo { + animation: App-logo-spin infinite 20s linear; + } } .App-header { - background-color: #282c34; - min-height: 100vh; - display: flex; - flex-direction: column; - align-items: center; - justify-content: center; - font-size: calc(10px + 2vmin); - color: white; + background-color: #282c34; + min-height: 100vh; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + font-size: calc(10px + 2vmin); + color: white; } .App-link { - color: #61dafb; + color: #61dafb; } @keyframes App-logo-spin { - from { - transform: rotate(0deg); - } - to { - transform: rotate(360deg); - } + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } } 
@font-face { - font-family: text-security-disc; - src: url("assets/text-security-disc.woff") format("woff"); + font-family: text-security-disc; + src: url("assets/text-security-disc.woff") format("woff"); +} + +.json-view { + height: 370px !important; + background-color: #2c2c2c !important; + border-radius: 10px !important; + padding: 10px !important; +} + +.jv-indent { + overflow-y: auto !important; + max-height: 310px !important; + border-radius: 10px; +} + +.jv-indent::-webkit-scrollbar { + width: 8px !important; + height: 8px !important; + border-radius: 10px; +} + +.jv-indent::-webkit-scrollbar-track { + background-color: #f1f1f1 !important; + border-radius: 10px; +} + +.jv-indent::-webkit-scrollbar-thumb { + background-color: #ccc !important; + border-radius: 999px !important; +} + +.jv-indent::-webkit-scrollbar-thumb:hover { + background-color: #bbb !important; } diff --git a/src/frontend/src/App.tsx b/src/frontend/src/App.tsx index 467e15ed8..20f2a18f9 100644 --- a/src/frontend/src/App.tsx +++ b/src/frontend/src/App.tsx @@ -1,6 +1,6 @@ import _ from "lodash"; import { useContext, useEffect, useState } from "react"; -import { useLocation } from "react-router-dom"; +import { useLocation, useNavigate } from "react-router-dom"; import "reactflow/dist/style.css"; import "./App.css"; @@ -9,10 +9,16 @@ import ErrorAlert from "./alerts/error"; import NoticeAlert from "./alerts/notice"; import SuccessAlert from "./alerts/success"; import CrashErrorComponent from "./components/CrashErrorComponent"; +import FetchErrorComponent from "./components/fetchErrorComponent"; import LoadingComponent from "./components/loadingComponent"; +import { + FETCH_ERROR_DESCRIPION, + FETCH_ERROR_MESSAGE, +} from "./constants/constants"; import { alertContext } from "./contexts/alertContext"; import { locationContext } from "./contexts/locationContext"; import { TabsContext } from "./contexts/tabsContext"; +import { typesContext } from "./contexts/typesContext"; import Router from 
"./routes"; export default function App() { @@ -36,8 +42,12 @@ export default function App() { successData, successOpen, setSuccessOpen, + setErrorData, loading, + setLoading, } = useContext(alertContext); + const navigate = useNavigate(); + const { fetchError } = useContext(typesContext); // Initialize state variable for the list of alerts const [alertsList, setAlertsList] = useState< @@ -137,7 +147,14 @@ export default function App() { > {loading ? (
- + {fetchError ? ( + + ) : ( + + )}
) : ( <> diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index 94cc939ff..6968d6472 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -9,6 +9,7 @@ import React, { import { Handle, Position, useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import CodeAreaComponent from "../../../../components/codeAreaComponent"; +import DictComponent from "../../../../components/dictComponent"; import Dropdown from "../../../../components/dropdownComponent"; import FloatComponent from "../../../../components/floatComponent"; import IconComponent from "../../../../components/genericIconComponent"; @@ -16,15 +17,22 @@ import InputComponent from "../../../../components/inputComponent"; import InputFileComponent from "../../../../components/inputFileComponent"; import InputListComponent from "../../../../components/inputListComponent"; import IntComponent from "../../../../components/intComponent"; +import KeypairListComponent from "../../../../components/keypairListComponent"; import PromptAreaComponent from "../../../../components/promptComponent"; import TextAreaComponent from "../../../../components/textAreaComponent"; import ToggleShadComponent from "../../../../components/toggleShadComponent"; +import { Button } from "../../../../components/ui/button"; import { TOOLTIP_EMPTY } from "../../../../constants/constants"; import { TabsContext } from "../../../../contexts/tabsContext"; import { typesContext } from "../../../../contexts/typesContext"; import { ParameterComponentType } from "../../../../types/components"; import { TabsState } from "../../../../types/tabs"; -import { isValidConnection } from "../../../../utils/reactflowUtils"; +import { + 
convertObjToArray, + convertValuesToNumbers, + hasDuplicateKeys, + isValidConnection, +} from "../../../../utils/reactflowUtils"; import { nodeColors, nodeIconsLucide, @@ -45,6 +53,7 @@ export default function ParameterComponent({ required = false, optionalHandle = null, info = "", + showNode, }: ParameterComponentType): JSX.Element { const ref = useRef(null); const refHtml = useRef(null); @@ -67,20 +76,27 @@ export default function ParameterComponent({ updateNodeInternals(data.id); }, [data.id, position, updateNodeInternals]); - const { reactFlowInstance } = useContext(typesContext); + const groupedEdge = useRef(null); + + const { reactFlowInstance, setFilterEdge } = useContext(typesContext); let disabled = reactFlowInstance?.getEdges().some((edge) => edge.targetHandle === id) ?? false; const { data: myData } = useContext(typesContext); - const handleOnNewValue = (newValue: string | string[] | boolean): void => { + const handleOnNewValue = ( + newValue: string | string[] | boolean | Object[] + ): void => { let newData = cloneDeep(data); newData.node!.template[name].value = newValue; setData(newData); // Set state to pending //@ts-ignore setTabsState((prev: TabsState) => { + if (!prev[tabId]) { + return prev; + } return { ...prev, [tabId]: { @@ -93,6 +109,8 @@ export default function ParameterComponent({ renderTooltips(); }; + const [errorDuplicateKey, setErrorDuplicateKey] = useState(false); + useEffect(() => { if (name === "openai_api_base") console.log(info); // @ts-ignore @@ -108,55 +126,64 @@ export default function ParameterComponent({ }, [info]); function renderTooltips() { - let groupedObj = groupByFamily(myData, tooltipTitle!, left, flow!); + let groupedObj: any = groupByFamily(myData, tooltipTitle!, left, flow!); + groupedEdge.current = groupedObj; if (groupedObj && groupedObj.length > 0) { - //@ts-ignore //@ts-ignore refHtml.current = groupedObj.map((item, index) => { const Icon: any = nodeIconsLucide[item.family] ?? 
nodeIconsLucide["unknown"]; return ( - 0 ? "mt-2 flex items-center" : "flex items-center" + <> + {index === 0 && ( + + {left + ? "Avaliable input components:" + : "Avaliable output components:"} + )} - > -
0 ? "mt-2 flex items-center" : "mt-3 flex items-center" + )} > - -
- - {nodeNames[item.family] ?? "Other"} - - {" "} - {item.type === "" ? "" : " - "} - {item.type.split(", ").length > 2 - ? item.type.split(", ").map((el, index) => ( - - - {index === item.type.split(", ").length - 1 - ? el - : (el += `, `)} - - - )) - : item.type} + > + + + + {nodeNames[item.family] ?? "Other"}{" "} + + {" "} + {item.type === "" ? "" : " - "} + {item.type.split(", ").length > 2 + ? item.type.split(", ").map((el, index) => ( + + + {index === item.type.split(", ").length - 1 + ? el + : (el += `, `)} + + + )) + : item.type} + - + ); }); } else { @@ -169,7 +196,43 @@ export default function ParameterComponent({ renderTooltips(); }, [tooltipTitle, flow]); - return ( + return !showNode ? ( + left && + (type === "str" || + type === "bool" || + type === "float" || + type === "code" || + type === "prompt" || + type === "file" || + type === "int") && + !optionalHandle ? ( + <> + ) : ( + + + isValidConnection(connection, reactFlowInstance!) + } + className={classNames( + left ? "my-12 -ml-0.5 " : " my-12 -mr-0.5 ", + "h-3 w-3 rounded-full border-2 bg-background" + )} + style={{ + borderColor: color, + top: position, + }} + > + + ) + ) : (
) : ( - - - isValidConnection(connection, reactFlowInstance!) - } - className={classNames( - left ? "-ml-0.5 " : "-mr-0.5 ", - "h-3 w-3 rounded-full border-2 bg-background" - )} - style={{ - borderColor: color, - top: position, - }} - > - + )} {left === true && @@ -334,11 +406,54 @@ export default function ParameterComponent({ field_name={name} setNodeClass={(nodeClass) => { data.node = nodeClass; + const clone = cloneDeep(data); + clone.node = nodeClass; + setData(clone); }} nodeClass={data.node} disabled={disabled} value={data.node?.template[name].value ?? ""} - onChange={handleOnNewValue} + onChange={(e) => { + handleOnNewValue(e); + }} + /> +
+ ) : left === true && type === "NestedDict" ? ( +
+ { + data.node!.template[name].value = newValue; + handleOnNewValue(newValue); + }} + /> +
+ ) : left === true && type === "dict" ? ( +
+ { + const valueToNumbers = convertValuesToNumbers(newValue); + data.node!.template[name].value = valueToNumbers; + setErrorDuplicateKey(hasDuplicateKeys(valueToNumbers)); + handleOnNewValue(valueToNumbers); + }} />
) : ( diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 30dda4034..524b54ead 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -25,10 +25,48 @@ export default function GenericNode({ const [data, setData] = useState(olddata); const { updateFlow, flows, tabId } = useContext(TabsContext); const updateNodeInternals = useUpdateNodeInternals(); - const { types, deleteNode, reactFlowInstance } = useContext(typesContext); + const { types, deleteNode, reactFlowInstance, setFilterEdge, getFilterEdge } = + useContext(typesContext); const name = nodeIconsLucide[data.type] ? data.type : types[data.type]; const [validationStatus, setValidationStatus] = useState(null); + const [showNode, setShowNode] = useState(true); + const [handles, setHandles] = useState([]); + let numberOfInputs: boolean[] = []; + + function countHandles(): void { + numberOfInputs = Object.keys(data.node!.template) + .filter((templateField) => templateField.charAt(0) !== "_") + .map((templateCamp) => { + const { template } = data.node!; + if (template[templateCamp].input_types) return true; + if (!template[templateCamp].show) return false; + switch (template[templateCamp].type) { + case "str": + return false; + case "bool": + return false; + case "float": + return false; + case "code": + return false; + case "prompt": + return false; + case "file": + return false; + case "int": + return false; + default: + return true; + } + }); + setHandles(numberOfInputs); + } + + useEffect(() => { + countHandles(); + }, []); + // State for outline color const { sseData, isBuilding } = useSSE(); useEffect(() => { @@ -50,8 +88,15 @@ export default function GenericNode({ }); updateFlow(flow); } + countHandles(); }, [data]); + useEffect(() => { + setTimeout(() => { + updateNodeInternals(data.id); + }, 300); + }, [showNode]); + // New useEffect to watch for changes in sseData and 
update validation status useEffect(() => { const relevantData = sseData[data.id]; @@ -62,7 +107,6 @@ export default function GenericNode({ setValidationStatus(null); } }, [sseData, data.id]); - return ( <> @@ -70,183 +114,325 @@ export default function GenericNode({ data={data} setData={setData} deleteNode={deleteNode} + setShowNode={setShowNode} + numberOfHandles={handles} + showNode={showNode} >
- {data.node?.beta && ( + {data.node?.beta && showNode && (
BETA
)} -
-
- -
- -
- {data.node?.display_name} -
-
-
-
-
-
- Building... - ) : !validationStatus ? ( - - Build{" "} - {" "} - flow to validate status. - - ) : ( -
- {typeof validationStatus.params === "string" - ? validationStatus.params - .split("\n") - .map((line: string, index: number) => ( -
{line}
- )) - : ""} -
- ) - } - > -
-
-
-
-
-
-
-
-
- -
-
{data.node?.description}
- - <> - {Object.keys(data.node!.template) - .filter((templateField) => templateField.charAt(0) !== "_") - .map((templateField: string, idx) => ( -
- {data.node!.template[templateField].show && - !data.node!.template[templateField].advanced ? ( - - ) : ( - <> - )} -
- ))} +
+
- {" "} -
- 0 - ? data.node.output_types.join("|") - : data.type + className={ + "generic-node-title-arrangement rounded-full" + + (!showNode && "justify-center") } - tooltipTitle={data.node?.base_classes.join("\n")} - id={[data.type, data.id, ...data.node!.base_classes].join("|")} - type={data.node?.base_classes.join("|")} - left={false} - /> - + > + + {showNode && ( +
+ +
+ {data.node?.display_name} +
+
+
+ )} +
+
+ {!showNode && ( + <> + {Object.keys(data.node!.template) + .filter((templateField) => templateField.charAt(0) !== "_") + .map( + (templateField: string, idx) => + data.node!.template[templateField].show && + !data.node!.template[templateField].advanced && ( + + ) + )} + 0 + ? data.node.output_types.join("|") + : data.type + } + tooltipTitle={data.node?.base_classes.join("\n")} + id={[data.type, data.id, ...data.node!.base_classes].join( + "|" + )} + type={data.node?.base_classes.join("|")} + left={false} + showNode={showNode} + /> + + )} +
+ + {showNode && ( +
+
+ Building... + ) : !validationStatus ? ( + + Build{" "} + {" "} + flow to validate status. + + ) : ( +
+ {typeof validationStatus.params === "string" + ? validationStatus.params + .split("\n") + .map((line: string, index: number) => ( +
{line}
+ )) + : ""} +
+ ) + } + > +
+
+
+
+
+
+
+
+ )} +
+ + {showNode && ( +
+ {data.node?.description !== "" && showNode && ( +
+ {data.node?.description} +
+ )} + + <> + {Object.keys(data.node!.template) + .filter((templateField) => templateField.charAt(0) !== "_") + .map((templateField: string, idx) => ( +
+ {data.node!.template[templateField].show && + !data.node!.template[templateField].advanced ? ( + + ) : ( + <> + )} +
+ ))} +
+ {" "} +
+ 0 + ? data.node.output_types.join("|") + : data.type + } + tooltipTitle={data.node?.base_classes.join("\n")} + id={[data.type, data.id, ...data.node!.base_classes].join("|")} + type={data.node?.base_classes.join("|")} + left={false} + showNode={showNode} + /> + +
+ )}
); diff --git a/src/frontend/src/components/AccordionComponent/index.tsx b/src/frontend/src/components/AccordionComponent/index.tsx index e19bd3d29..e00932c76 100644 --- a/src/frontend/src/components/AccordionComponent/index.tsx +++ b/src/frontend/src/components/AccordionComponent/index.tsx @@ -32,13 +32,6 @@ export default function AccordionComponent({ value === "" ? setValue(keyValue!) : setValue(""); } - const handleKeyDown = (event) => { - if (event.key === "Backspace") { - event.preventDefault(); - event.stopPropagation(); - } - }; - return ( <> (false); + + return ( +
+ + + + + { + event.stopPropagation(); + event.preventDefault(); + setShowOptions(!showOptions); + }} + > + {options.map(({ name, onBtnClick }, index) => ( + + {name} + + ))} + + +
+ ); +} diff --git a/src/frontend/src/components/EditFlowSettingsComponent/index.tsx b/src/frontend/src/components/EditFlowSettingsComponent/index.tsx index 77fd64819..8f9907129 100644 --- a/src/frontend/src/components/EditFlowSettingsComponent/index.tsx +++ b/src/frontend/src/components/EditFlowSettingsComponent/index.tsx @@ -36,32 +36,21 @@ export const EditFlowSettings: React.FC = ({ } if (invalidName !== undefined) { if (!nameLists.current.includes(value)) { - setInvalidName(false); + setInvalidName!(false); } else { - setInvalidName(true); + setInvalidName!(true); + } + + if (!nameLists.current.includes(value)) { + setInvalidName!(false); + } else { + setInvalidName!(true); } } - if (!nameLists.current.includes(value)) { - setInvalidName!(false); - } else { - setInvalidName!(true); - } setName(value); - setCurrentName(value); }; - const [currentName, setCurrentName] = useState(name); - - const [currentDescription, setCurrentDescription] = useState(description); - - useEffect(() => { - setCurrentName(name); - setCurrentDescription(description); - }, [name, description]); - const handleDescriptionChange = (event: ChangeEvent) => { - flows.find((f) => f.id === tabId).description = event.target.value; - setCurrentDescription(flows.find((f) => f.id === tabId).description); setDescription(event.target.value); }; @@ -82,7 +71,7 @@ export const EditFlowSettings: React.FC = ({ onChange={handleNameChange} type="text" name="name" - value={currentName ?? ""} + value={name ?? 
""} placeholder="File name" id="name" maxLength={maxLength} @@ -97,7 +86,7 @@ export const EditFlowSettings: React.FC = ({ name="description" id="description" onChange={handleDescriptionChange} - value={currentDescription} + value={description} placeholder="Flow description" className="mt-2 max-h-[100px] font-normal" rows={3} diff --git a/src/frontend/src/components/PaginatorComponent/index.tsx b/src/frontend/src/components/PaginatorComponent/index.tsx index 8fcd8683f..fa759a8bb 100644 --- a/src/frontend/src/components/PaginatorComponent/index.tsx +++ b/src/frontend/src/components/PaginatorComponent/index.tsx @@ -1,4 +1,4 @@ -import { useState } from "react"; +import { useEffect, useState } from "react"; import { Select, SelectContent, @@ -13,17 +13,19 @@ import { Button } from "../ui/button"; export default function PaginatorComponent({ pageSize = 10, pageIndex = 1, - rowsCount = [10, 20, 30], + rowsCount = [10, 20, 50, 100], totalRowsCount = 0, paginate, }: PaginatorComponentType) { const [size, setPageSize] = useState(pageSize); - const [index, setPageIndex] = useState(pageIndex); - const [maxIndex, setMaxPageIndex] = useState( Math.ceil(totalRowsCount / pageSize) ); + useEffect(() => { + setMaxPageIndex(Math.ceil(totalRowsCount / size)); + }, [totalRowsCount]); + return ( <>
@@ -35,8 +37,9 @@ export default function PaginatorComponent({ onValueChange={(pageSize: string) => { setPageSize(Number(pageSize)); setMaxPageIndex(Math.ceil(totalRowsCount / Number(pageSize))); - paginate(Number(pageSize), index); + paginate(Number(pageSize), 1); }} + value={pageSize.toString()} > @@ -51,14 +54,14 @@ export default function PaginatorComponent({
- Page {index} of {maxIndex} + Page {pageIndex} of {maxIndex}
- {tabs.map((tab, index) => ( + {tabs.map((tab, idx) => ( - {index < 4 ? ( - <> + {idx < 4 ? ( +
{tab.description && (
{tab.code} - - ) : index === 4 ? ( +
+ ) : idx === 4 ? ( <>
- {data?.map((node: any, index) => ( -
+ {data?.map((node: any, i) => ( +
{tweaks?.tweaksList!.current.includes( node["data"]["id"] ) && ( @@ -234,12 +255,16 @@ export default function CodeTabsComponent({ node.data.node.template[templateField] .type === "file" || node.data.node.template[templateField] - .type === "int") + .type === "int" || + node.data.node.template[templateField] + .type === "dict" || + node.data.node.template[templateField] + .type === "NestedDict") ) - .map((templateField, index) => { + .map((templateField, indx) => { return ( @@ -278,7 +303,7 @@ export default function CodeTabsComponent({ let newInputList = cloneDeep(old); newInputList![ - index + i ].data.node.template[ templateField ].value = target; @@ -296,55 +321,46 @@ export default function CodeTabsComponent({ ) : node.data.node.template[ templateField ].multiline ? ( - -
- + { + setData((old) => { + let newInputList = + cloneDeep(old); + newInputList![ + i + ].data.node.template[ templateField - ].value || + ].value = target; + return newInputList; + }); + tweaks.buildTweakObject!( + node["data"]["id"], + target, node.data.node .template[ templateField - ].value === "" - ? "" - : node.data.node - .template[ - templateField - ].value - } - onChange={(target) => { - setData((old) => { - let newInputList = - cloneDeep(old); - newInputList![ - index - ].data.node.template[ - templateField - ].value = target; - return newInputList; - }); - tweaks.buildTweakObject!( - node["data"]["id"], - target, - node.data.node - .template[ - templateField - ] - ); - }} - /> -
-
+ ] + ); + }} + /> +
) : ( + -
- {}} - fileTypes={ - node.data.node.template[ - templateField - ].fileTypes - } - suffixes={ - node.data.node.template[ - templateField - ].suffixes - } - onFileChange={( - value: any - ) => { - node.data.node.template[ - templateField - ].file_path = value; - }} - > -
- + ].value ?? "" + } + onChange={(target: any) => {}} + fileTypes={ + node.data.node.template[ + templateField + ].fileTypes + } + suffixes={ + node.data.node.template[ + templateField + ].suffixes + } + onFileChange={( + value: any + ) => { + node.data.node.template[ + templateField + ].file_path = value; + }} + >
+
) : node.data.node.template[ templateField ].type === "float" ? ( @@ -496,7 +495,7 @@ export default function CodeTabsComponent({ let newInputList = cloneDeep(old); newInputList![ - index + i ].data.node.template[ templateField ].value = target; @@ -532,7 +531,7 @@ export default function CodeTabsComponent({ let newInputList = cloneDeep(old); newInputList![ - index + i ].data.node.template[ templateField ].value = target; @@ -584,7 +583,7 @@ export default function CodeTabsComponent({ let newInputList = cloneDeep(old); newInputList![ - index + i ].data.node.template[ templateField ].value = target; @@ -603,114 +602,181 @@ export default function CodeTabsComponent({ ) : node.data.node.template[ templateField ].type === "prompt" ? ( - + { + setData((old) => { + let newInputList = + cloneDeep(old); + newInputList![ + i + ].data.node.template[ templateField - ].value - )} - > -
- { - setData((old) => { - let newInputList = - cloneDeep(old); - newInputList![ - index - ].data.node.template[ - templateField - ].value = target; - return newInputList; - }); - tweaks.buildTweakObject!( - node["data"]["id"], - target, - node.data.node.template[ - templateField - ] - ); - }} - /> -
-
+ ] + ); + }} + /> +
) : node.data.node.template[ templateField ].type === "code" ? ( - + -
- { + setData((old) => { + let newInputList = + cloneDeep(old); + newInputList![ + i + ].data.node.template[ templateField - ].value || + ].value = target; + return newInputList; + }); + tweaks.buildTweakObject!( + node["data"]["id"], + target, node.data.node.template[ templateField - ].value === "" - ? "" - : node.data.node + ] + ); + }} + /> +
+ ) : node.data.node.template[ + templateField + ].type === "dict" ? ( +
+ { - setData((old) => { - let newInputList = - cloneDeep(old); - newInputList![ - index - ].data.node.template[ - templateField - ].value = target; - return newInputList; - }); - tweaks.buildTweakObject!( - node["data"]["id"], - target, - node.data.node.template[ - templateField - ] + ) + } + duplicateKey={ + errorDuplicateKey + } + onChange={(target) => { + const valueToNumbers = + convertValuesToNumbers( + target ); - }} - /> -
-
+ node.data.node!.template[ + templateField + ].value = valueToNumbers; + setErrorDuplicateKey( + hasDuplicateKeys( + valueToNumbers + ) + ); + setData((old) => { + let newInputList = + cloneDeep(old); + newInputList![ + i + ].data.node.template[ + templateField + ].value = target; + return newInputList; + }); + tweaks.buildTweakObject!( + node["data"]["id"], + target, + node.data.node.template[ + templateField + ] + ); + }} + /> +
+ ) : node.data.node.template[ + templateField + ].type === "NestedDict" ? ( +
+ { + setData((old) => { + let newInputList = + cloneDeep(old); + newInputList![ + i + ].data.node.template[ + templateField + ].value = target; + return newInputList; + }); + tweaks.buildTweakObject!( + node["data"]["id"], + target, + node.data.node.template[ + templateField + ] + ); + }} + /> +
) : node.data.node.template[ templateField ].type === "Any" ? ( diff --git a/src/frontend/src/components/dictComponent/index.tsx b/src/frontend/src/components/dictComponent/index.tsx new file mode 100644 index 000000000..29082f466 --- /dev/null +++ b/src/frontend/src/components/dictComponent/index.tsx @@ -0,0 +1,55 @@ +import { useEffect, useRef } from "react"; +import { DictComponentType } from "../../types/components"; + +import DictAreaModal from "../../modals/dictAreaModal"; +import { classNames } from "../../utils/utils"; +import { Input } from "../ui/input"; + +export default function DictComponent({ + value, + onChange, + disabled, + editNode = false, +}: DictComponentType): JSX.Element { + useEffect(() => { + if (disabled) { + onChange({}); + } + }, [disabled]); + + useEffect(() => { + if (value) onChange(value); + }, [value]); + + const ref = useRef(value); + + return ( +
1 && editNode ? "my-1" : "", + "flex flex-col gap-3" + )} + > + { +
+ { + onChange(obj); + }} + > + + +
+ } +
+ ); +} diff --git a/src/frontend/src/components/fetchErrorComponent/index.tsx b/src/frontend/src/components/fetchErrorComponent/index.tsx new file mode 100644 index 000000000..6004d9dfc --- /dev/null +++ b/src/frontend/src/components/fetchErrorComponent/index.tsx @@ -0,0 +1,16 @@ +import { fetchErrorComponentType } from "../../types/components"; +import IconComponent from "../genericIconComponent"; + +export default function FetchErrorComponent({ + message, + description, +}: fetchErrorComponentType) { + return ( +
+ +

+ {message} + {description} +
+ ); +} diff --git a/src/frontend/src/components/floatComponent/index.tsx b/src/frontend/src/components/floatComponent/index.tsx index 88e958fc9..454d3b601 100644 --- a/src/frontend/src/components/floatComponent/index.tsx +++ b/src/frontend/src/components/floatComponent/index.tsx @@ -10,8 +10,8 @@ export default function FloatComponent({ editNode = false, }: FloatComponentType): JSX.Element { const step = 0.1; - const min = 0; - const max = 1; + const min = -2; + const max = 2; // Clear component state useEffect(() => { @@ -27,10 +27,10 @@ export default function FloatComponent({ step={step} min={min} onInput={(event: React.ChangeEvent) => { - if (event.target.value < min.toString()) { + if (Number(event.target.value) < min) { event.target.value = min.toString(); } - if (event.target.value > max.toString()) { + if (Number(event.target.value) > max) { event.target.value = max.toString(); } }} @@ -39,7 +39,7 @@ export default function FloatComponent({ disabled={disabled} className={editNode ? "input-edit-node" : ""} placeholder={ - editNode ? "Number 0 to 1" : "Type a number from zero to one" + editNode ? "Number -2 to 2" : "Type a number from minus two to two" } onChange={(event) => { onChange(event.target.value); diff --git a/src/frontend/src/components/genericIconComponent/index.tsx b/src/frontend/src/components/genericIconComponent/index.tsx index 186c2e253..5cb6bdeca 100644 --- a/src/frontend/src/components/genericIconComponent/index.tsx +++ b/src/frontend/src/components/genericIconComponent/index.tsx @@ -1,17 +1,19 @@ +import { forwardRef } from "react"; import { IconComponentProps } from "../../types/components"; import { nodeIconsLucide } from "../../utils/styleUtils"; -export default function IconComponent({ - name, - className, - iconColor, -}: IconComponentProps): JSX.Element { - const TargetIcon = nodeIconsLucide[name] ?? 
nodeIconsLucide["unknown"]; - return ( - - ); -} +const ForwardedIconComponent = forwardRef( + ({ name, className, iconColor }: IconComponentProps, ref) => { + const TargetIcon = nodeIconsLucide[name] ?? nodeIconsLucide["unknown"]; + return ( + + ); + } +); + +export default ForwardedIconComponent; diff --git a/src/frontend/src/components/gradientChooserComponent/index.tsx b/src/frontend/src/components/gradientChooserComponent/index.tsx new file mode 100644 index 000000000..bff1fbd51 --- /dev/null +++ b/src/frontend/src/components/gradientChooserComponent/index.tsx @@ -0,0 +1,21 @@ +import { gradients } from "../../utils/styleUtils"; + +export default function GradientChooserComponent({ value, onChange }) { + return ( +
+ {gradients.map((gradient, idx) => ( +
{ + onChange(gradient); + }} + className={ + "duration-400 h-12 w-12 cursor-pointer rounded-full transition-all " + + gradient + + (value === gradient ? " shadow-lg ring-2 ring-primary" : "") + } + key={idx} + >
+ ))} +
+ ); +} diff --git a/src/frontend/src/components/headerComponent/index.tsx b/src/frontend/src/components/headerComponent/index.tsx index e9a5a36a9..5827588e1 100644 --- a/src/frontend/src/components/headerComponent/index.tsx +++ b/src/frontend/src/components/headerComponent/index.tsx @@ -1,14 +1,23 @@ -import { useContext, useEffect, useState } from "react"; +import { useContext } from "react"; import { FaDiscord, FaGithub, FaTwitter } from "react-icons/fa"; -import { Link, useLocation } from "react-router-dom"; +import { Link, useLocation, useNavigate } from "react-router-dom"; import AlertDropdown from "../../alerts/alertDropDown"; import { USER_PROJECTS_HEADER } from "../../constants/constants"; import { alertContext } from "../../contexts/alertContext"; +import { AuthContext } from "../../contexts/authContext"; import { darkContext } from "../../contexts/darkContext"; import { TabsContext } from "../../contexts/tabsContext"; -import { getRepoStars } from "../../controllers/API"; +import { gradients } from "../../utils/styleUtils"; import IconComponent from "../genericIconComponent"; import { Button } from "../ui/button"; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuLabel, + DropdownMenuSeparator, + DropdownMenuTrigger, +} from "../ui/dropdown-menu"; import { Separator } from "../ui/separator"; import MenuBar from "./components/menuBar"; @@ -17,26 +26,17 @@ export default function Header(): JSX.Element { const { dark, setDark } = useContext(darkContext); const { notificationCenter } = useContext(alertContext); const location = useLocation(); + const { logout, autoLogin, isAdmin, userData } = useContext(AuthContext); + const { stars, gradientIndex } = useContext(darkContext); + const navigate = useNavigate(); - const [stars, setStars] = useState(null); - - // Get and set numbers of stars on header - useEffect(() => { - async function fetchStars() { - const starsCount = await getRepoStars("logspace-ai", "langflow"); - 
setStars(starsCount); - } - fetchStars(); - }, []); return (
โ›“๏ธ - + {flows.findIndex((f) => tabId === f.id) !== -1 && tabId !== "" && ( )} @@ -119,6 +119,60 @@ export default function Header(): JSX.Element { />
+ {!autoLogin && ( + + )} + {!autoLogin && ( + <> + + + +
diff --git a/src/frontend/src/components/inputComponent/index.tsx b/src/frontend/src/components/inputComponent/index.tsx index 287ceac63..490be0fb0 100644 --- a/src/frontend/src/components/inputComponent/index.tsx +++ b/src/frontend/src/components/inputComponent/index.tsx @@ -30,6 +30,7 @@ export default function InputComponent({ {isForm ? ( ) : ( { + onClick={(event) => { + event.preventDefault(); setPwdVisible(!pwdVisible); }} > diff --git a/src/frontend/src/components/inputListComponent/index.tsx b/src/frontend/src/components/inputListComponent/index.tsx index a1ef04970..06d2f0f6a 100644 --- a/src/frontend/src/components/inputListComponent/index.tsx +++ b/src/frontend/src/components/inputListComponent/index.tsx @@ -18,6 +18,11 @@ export default function InputListComponent({ } }, [disabled]); + // @TODO Recursive Character Text Splitter - the value might be in string format, whereas the InputListComponent specifically requires an array format. To ensure smooth operation and prevent potential errors, it's crucial that we handle the conversion from a string to an array with the string as its element. + if (typeof value === "string") { + value = [value]; + } + return (
) => { - if (event.target.value < min.toString()) { + if (Number(event.target.value) < min) { event.target.value = min.toString(); } }} diff --git a/src/frontend/src/components/keypairListComponent/index.tsx b/src/frontend/src/components/keypairListComponent/index.tsx new file mode 100644 index 000000000..f1e93e7fc --- /dev/null +++ b/src/frontend/src/components/keypairListComponent/index.tsx @@ -0,0 +1,117 @@ +import { useEffect, useRef } from "react"; +import { KeyPairListComponentType } from "../../types/components"; + +import _ from "lodash"; +import { classNames } from "../../utils/utils"; +import IconComponent from "../genericIconComponent"; +import { Input } from "../ui/input"; + +export default function KeypairListComponent({ + value, + onChange, + disabled, + editNode = false, + duplicateKey, +}: KeyPairListComponentType): JSX.Element { + useEffect(() => { + if (disabled) { + onChange([""]); + } + }, [disabled]); + + const ref = useRef(value.length === 0 ? [{ "": "" }] : value); + + useEffect(() => { + if (JSON.stringify(value) !== JSON.stringify(ref.current)) { + ref.current = value; + onChange(value); + } + }, [value]); + + const handleChangeKey = (event, idx) => { + const newInputList = _.cloneDeep(ref.current); + const oldKey = Object.keys(newInputList[idx])[0]; + const updatedObj = { [event.target.value]: newInputList[idx][oldKey] }; + newInputList[idx] = updatedObj; + onChange(newInputList); + }; + + const handleChangeValue = (newValue, idx) => { + const newInputList = _.cloneDeep(ref.current); + const key = Object.keys(newInputList[idx])[0]; + newInputList[idx][key] = newValue; + onChange(newInputList); + }; + + return ( +
1 && editNode ? "my-1" : "", + "flex h-full flex-col gap-3" + )} + > + {ref.current?.map((obj, index) => { + return Object.keys(obj).map((key, idx) => { + return ( +
+ handleChangeKey(event, index)} + onKeyDown={(e) => { + if (e.ctrlKey && e.key === "Backspace") { + e.preventDefault(); + e.stopPropagation(); + } + }} + /> + + + handleChangeValue(event.target.value, index) + } + /> + + {index === ref.current.length - 1 ? ( + + ) : ( + + )} +
+ ); + }); + })} +
+ ); +} diff --git a/src/frontend/src/components/loadingComponent/index.tsx b/src/frontend/src/components/loadingComponent/index.tsx index 0cd3c9ef0..3ef0f8ef5 100644 --- a/src/frontend/src/components/loadingComponent/index.tsx +++ b/src/frontend/src/components/loadingComponent/index.tsx @@ -4,10 +4,10 @@ export default function LoadingComponent({ remSize, }: LoadingComponentProps): JSX.Element { return ( -
+
+
+ + +
+
+ + +
+
+ ); +}; diff --git a/src/frontend/src/components/ui/dialog.tsx b/src/frontend/src/components/ui/dialog.tsx index 57dda0fef..551fa281c 100644 --- a/src/frontend/src/components/ui/dialog.tsx +++ b/src/frontend/src/components/ui/dialog.tsx @@ -1,7 +1,7 @@ import * as DialogPrimitive from "@radix-ui/react-dialog"; +import { Cross2Icon } from "@radix-ui/react-icons"; import * as React from "react"; import { cn } from "../../utils/utils"; -import IconComponent from "../genericIconComponent"; const Dialog = DialogPrimitive.Root; @@ -13,7 +13,7 @@ const DialogPortal = ({ ...props }: DialogPrimitive.DialogPortalProps) => ( -
+
{children}
@@ -27,7 +27,7 @@ const DialogOverlay = React.forwardRef< {children} - + Close diff --git a/src/frontend/src/components/ui/select-custom.tsx b/src/frontend/src/components/ui/select-custom.tsx new file mode 100644 index 000000000..5ed2fbd45 --- /dev/null +++ b/src/frontend/src/components/ui/select-custom.tsx @@ -0,0 +1,108 @@ +"use client"; + +import * as SelectPrimitive from "@radix-ui/react-select"; +import * as React from "react"; +import { cn } from "../../utils/utils"; + +const Select = SelectPrimitive.Root; + +const SelectGroup = SelectPrimitive.Group; + +const SelectValue = SelectPrimitive.Value; + +const SelectTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + {children} + + +)); +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName; + +const SelectContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, position = "popper", ...props }, ref) => ( + + + + {children} + + + +)); +SelectContent.displayName = SelectPrimitive.Content.displayName; + +const SelectLabel = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectLabel.displayName = SelectPrimitive.Label.displayName; + +const SelectItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + {children} + +)); +SelectItem.displayName = SelectPrimitive.Item.displayName; + +const SelectSeparator = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectSeparator.displayName = SelectPrimitive.Separator.displayName; + +export { + Select, + SelectContent, + SelectGroup, + SelectItem, + SelectLabel, + SelectSeparator, + SelectTrigger, + SelectValue, +}; diff --git a/src/frontend/src/components/ui/skeleton.tsx b/src/frontend/src/components/ui/skeleton.tsx new file 
mode 100644 index 000000000..f7a1573f2 --- /dev/null +++ b/src/frontend/src/components/ui/skeleton.tsx @@ -0,0 +1,15 @@ +import { cn } from "../../utils/utils"; + +function Skeleton({ + className, + ...props +}: React.HTMLAttributes) { + return ( +
+ ); +} + +export { Skeleton }; diff --git a/src/frontend/src/constants/constants.ts b/src/frontend/src/constants/constants.ts index 7f6c7614c..d6ba802ee 100644 --- a/src/frontend/src/constants/constants.ts +++ b/src/frontend/src/constants/constants.ts @@ -115,6 +115,9 @@ export const EDIT_DIALOG_SUBTITLE = export const CODE_PROMPT_DIALOG_SUBTITLE = "Edit your Python code. This code snippet accepts module import and a single function definition. Make sure that your function returns a string."; +export const CODE_DICT_DIALOG_SUBTITLE = + "Edit your dictionary. This dialog allows you to create your own customized dictionary. You can add as many key-value pairs as you want. While in edit mode, you can enter ({}) or ([]), and this will result in adding a new object or array."; + /** * The base text for subtitle of Prompt Dialog * @constant @@ -499,6 +502,21 @@ export const NOUNS: string[] = [ */ export const USER_PROJECTS_HEADER = "My Collection"; +/** + * Header text for admin page + * @constant + * + */ +export const ADMIN_HEADER_TITLE = "Admin Page"; + +/** + * Header description for admin page + * @constant + * + */ +export const ADMIN_HEADER_DESCRIPTION = + "Navigate through this section to efficiently oversee all application users. From here, you can seamlessly manage user accounts."; + /** * URLs excluded from error retries. 
* @constant @@ -508,6 +526,7 @@ export const URL_EXCLUDED_FROM_ERROR_RETRIES = [ "/api/v1/validate/code", "/api/v1/custom_component", "/api/v1/validate/prompt", + "http://localhost:7860/login", ]; export const skipNodeUpdate = ["CustomComponent"]; @@ -518,10 +537,28 @@ export const CONTROL_INPUT_STATE = { username: "", }; +export const CONTROL_PATCH_USER_STATE = { + password: "", + cnfPassword: "", + gradient: "", +}; + export const CONTROL_LOGIN_STATE = { username: "", password: "", }; + +export const CONTROL_NEW_USER = { + username: "", + password: "", + is_active: false, + is_superuser: false, +}; + +export const CONTROL_NEW_API_KEY = { + apikeyname: "", +}; + export const tabsCode = []; export function tabsArray(codes: string[], method: number) { @@ -602,3 +639,24 @@ export function tabsArray(codes: string[], method: number) { }, ]; } +export const FETCH_ERROR_MESSAGE = "Couldn't establish a connection."; +export const FETCH_ERROR_DESCRIPION = + "Check if everything is working properly and try again."; + +export const BASE_URL_API = "/api/v1/"; + +export const SIGN_UP_SUCCESS = "Account created! Await admin activation. "; + +export const API_PAGE_PARAGRAPH_1 = + "Your secret API keys are listed below. 
Please note that we do not display your secret API keys again after you generate them."; + +export const API_PAGE_PARAGRAPH_2 = + "Do not share your API key with others, or expose it in the browser or other client-side code."; + +export const API_PAGE_USER_KEYS = + "This user does not have any keys assigned at the moment."; + +export const LAST_USED_SPAN_1 = "The last time this key was used."; + +export const LAST_USED_SPAN_2 = + "Accurate to within the hour from the most recent usage."; diff --git a/src/frontend/src/contexts/alertContext.tsx b/src/frontend/src/contexts/alertContext.tsx index 1741b88b6..0b183a297 100644 --- a/src/frontend/src/contexts/alertContext.tsx +++ b/src/frontend/src/contexts/alertContext.tsx @@ -48,6 +48,7 @@ export function AlertProvider({ children }: { children: ReactNode }) { const [successOpen, setSuccessOpen] = useState(false); const [notificationCenter, setNotificationCenter] = useState(false); const [notificationList, setNotificationList] = useState([]); + const [isTweakPage, setIsTweakPage] = useState(false); const pushNotificationList = (notification: AlertItemType) => { setNotificationList((old) => { let newNotificationList = _.cloneDeep(old); diff --git a/src/frontend/src/contexts/authContext.tsx b/src/frontend/src/contexts/authContext.tsx index 691ba90de..2d62fed07 100644 --- a/src/frontend/src/contexts/authContext.tsx +++ b/src/frontend/src/contexts/authContext.tsx @@ -1,74 +1,120 @@ -import { createContext, useEffect, useState } from "react"; -import { AuthContextType, userData } from "../types/contexts/auth"; +import { createContext, useContext, useEffect, useState } from "react"; +import Cookies from "universal-cookie"; +import { autoLogin as autoLoginApi, getLoggedUser } from "../controllers/API"; +import { Users } from "../types/api"; +import { AuthContextType } from "../types/contexts/auth"; +import { alertContext } from "./alertContext"; const initialValue: AuthContextType = { + isAdmin: false, + setIsAdmin: () => false, 
isAuthenticated: false, accessToken: null, + refreshToken: null, login: () => {}, logout: () => {}, - refreshAccessToken: () => Promise.resolve(), userData: null, setUserData: () => {}, + getAuthentication: () => false, + authenticationErrorCount: 0, + autoLogin: false, + setAutoLogin: () => {}, }; -const AuthContext = createContext(initialValue); +export const AuthContext = createContext(initialValue); export function AuthProvider({ children }): React.ReactElement { - const [accessToken, setAccessToken] = useState(null); - const [userData, setUserData] = useState(null); - + const cookies = new Cookies(); + const [accessToken, setAccessToken] = useState( + cookies.get("access_tkn_lflw") + ); + const [refreshToken, setRefreshToken] = useState( + cookies.get("refresh_tkn_lflw") + ); + const [isAuthenticated, setIsAuthenticated] = useState(false); + const [isAdmin, setIsAdmin] = useState(false); + const [userData, setUserData] = useState(null); + const [autoLogin, setAutoLogin] = useState(false); + const { setLoading } = useContext(alertContext); useEffect(() => { - const storedAccessToken = localStorage.getItem("access_token"); + const storedAccessToken = cookies.get("access_tkn_lflw"); if (storedAccessToken) { setAccessToken(storedAccessToken); } }, []); + useEffect(() => { + const isLoginPage = location.pathname.includes("login"); + + autoLoginApi() + .then((user) => { + if (user && user["access_token"]) { + user["refresh_token"] = "auto"; + login(user["access_token"], user["refresh_token"]); + setUserData(user); + setAutoLogin(true); + setLoading(false); + } + }) + .catch((error) => { + setAutoLogin(false); + if (getAuthentication() && !isLoginPage) { + getLoggedUser() + .then((user) => { + setUserData(user); + setLoading(false); + const isSuperUser = user!.is_superuser; + setIsAdmin(isSuperUser); + }) + .catch((error) => {}); + } else { + setLoading(false); + } + }); + }, []); + + function getAuthentication() { + const storedRefreshToken = 
cookies.get("refresh_tkn_lflw"); + const storedAccess = cookies.get("access_tkn_lflw"); + const auth = storedAccess && storedRefreshToken ? true : false; + return auth; + } + function login(newAccessToken: string, refreshToken: string) { - localStorage.setItem("access_token", newAccessToken); + cookies.set("access_tkn_lflw", newAccessToken, { path: "/" }); + cookies.set("refresh_tkn_lflw", refreshToken, { path: "/" }); setAccessToken(newAccessToken); - // Store refreshToken if needed + setRefreshToken(refreshToken); + setIsAuthenticated(true); } function logout() { - localStorage.removeItem("access_token"); - // Clear refreshToken if used + cookies.remove("access_tkn_lflw", { path: "/" }); + cookies.remove("refresh_tkn_lflw", { path: "/" }); + setIsAdmin(false); + setUserData(null); setAccessToken(null); - } - - async function refreshAccessToken(refreshToken: string) { - try { - // Call your API to refresh the access token using the refresh token - const response = await fetch("/api/refresh-token", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ refreshToken }), - }); - - if (response.ok) { - const data = await response.json(); - login(data.accessToken, refreshToken); - } else { - logout(); - } - } catch (error) { - logout(); - } + setRefreshToken(null); + setIsAuthenticated(false); } return ( // !! 
to convert string to boolean {children} diff --git a/src/frontend/src/contexts/darkContext.tsx b/src/frontend/src/contexts/darkContext.tsx index 091b7577f..571a89b01 100644 --- a/src/frontend/src/contexts/darkContext.tsx +++ b/src/frontend/src/contexts/darkContext.tsx @@ -1,9 +1,14 @@ import { createContext, useEffect, useState } from "react"; +import { getRepoStars } from "../controllers/API"; import { darkContextType } from "../types/typesContext"; const initialValue = { dark: {}, setDark: () => {}, + stars: 0, + setStars: (stars) => 0, + gradientIndex: 0, + setGradientIndex: () => 0, }; export const darkContext = createContext(initialValue); @@ -12,6 +17,20 @@ export function DarkProvider({ children }) { const [dark, setDark] = useState( JSON.parse(window.localStorage.getItem("isDark")!) ?? false ); + const [stars, setStars] = useState(0); + const [gradientIndex, setGradientIndex] = useState(0); + + useEffect(() => { + async function fetchStars() { + const starsCount = await getRepoStars("logspace-ai", "langflow"); + setStars(starsCount); + } + fetchStars(); + const min = 0; + const max = 30; + setGradientIndex(Math.floor(Math.random() * (max - min + 1)) + min); + }, []); + useEffect(() => { if (dark) { document.getElementById("body")!.classList.add("dark"); @@ -20,11 +39,16 @@ export function DarkProvider({ children }) { } window.localStorage.setItem("isDark", dark.toString()); }, [dark]); + return ( {children} diff --git a/src/frontend/src/contexts/index.tsx b/src/frontend/src/contexts/index.tsx index 603a3516e..f90cb7812 100644 --- a/src/frontend/src/contexts/index.tsx +++ b/src/frontend/src/contexts/index.tsx @@ -1,8 +1,11 @@ import { ReactNode } from "react"; +import { BrowserRouter } from "react-router-dom"; import { ReactFlowProvider } from "reactflow"; import { TooltipProvider } from "../components/ui/tooltip"; +import { ApiInterceptor } from "../controllers/API/api"; import { SSEProvider } from "./SSEContext"; import { AlertProvider } from 
"./alertContext"; +import { AuthProvider } from "./authContext"; import { DarkProvider } from "./darkContext"; import { LocationProvider } from "./locationContext"; import { TabsProvider } from "./tabsContext"; @@ -13,23 +16,28 @@ export default function ContextWrapper({ children }: { children: ReactNode }) { //element to wrap all context return ( <> - - - - - - - - - {children} - - - - - - - - + + + + + + + + + + + + {children} + + + + + + + + + + ); } diff --git a/src/frontend/src/contexts/tabsContext.tsx b/src/frontend/src/contexts/tabsContext.tsx index 67231f4e5..88f5005fe 100644 --- a/src/frontend/src/contexts/tabsContext.tsx +++ b/src/frontend/src/contexts/tabsContext.tsx @@ -1,3 +1,4 @@ +import { AxiosError } from "axios"; import _ from "lodash"; import { ReactNode, @@ -21,7 +22,7 @@ import { import { APIClassType, APITemplateType } from "../types/api"; import { tweakType } from "../types/components"; import { FlowType, NodeDataType, NodeType } from "../types/flow"; -import { TabsContextType, TabsState, errorsVarType } from "../types/tabs"; +import { TabsContextType, TabsState } from "../types/tabs"; import { addVersionToDuplicates, updateIds, @@ -29,6 +30,7 @@ import { } from "../utils/reactflowUtils"; import { getRandomDescription, getRandomName } from "../utils/utils"; import { alertContext } from "./alertContext"; +import { AuthContext } from "./authContext"; import { typesContext } from "./typesContext"; const uid = new ShortUniqueId({ length: 5 }); @@ -37,6 +39,7 @@ const TabsContextInitialValue: TabsContextType = { save: () => {}, tabId: "", setTabId: (index: string) => {}, + isLoading: true, flows: [], removeFlow: (id: string) => {}, addFlow: async (flowData?: any) => "", @@ -45,11 +48,11 @@ const TabsContextInitialValue: TabsContextType = { downloadFlow: (flow: FlowType) => {}, downloadFlows: () => {}, uploadFlows: () => {}, - uploadFlow: () => {}, + uploadFlow: async () => "", isBuilt: false, setIsBuilt: (state: boolean) => {}, hardReset: () => {}, 
- saveFlow: async (flow: FlowType) => {}, + saveFlow: async (flow: FlowType, silent?: boolean) => {}, lastCopiedSelection: null, setLastCopiedSelection: (selection: any) => {}, tabsState: {}, @@ -68,10 +71,14 @@ export const TabsContext = createContext( ); export function TabsProvider({ children }: { children: ReactNode }) { - const { setErrorData, setNoticeData } = useContext(alertContext); + const { setErrorData, setNoticeData, setSuccessData } = + useContext(alertContext); + const { getAuthentication, isAuthenticated } = useContext(AuthContext); const [tabId, setTabId] = useState(""); + const [isLoading, setIsLoading] = useState(true); + const [flows, setFlows] = useState>([]); const [id, setId] = useState(uid()); const { templates, reactFlowInstance } = useContext(typesContext); @@ -82,6 +89,12 @@ export function TabsProvider({ children }: { children: ReactNode }) { const [tabsState, setTabsState] = useState({}); const [getTweak, setTweak] = useState([]); + useEffect(() => { + if (!isAuthenticated) { + hardReset(); + } + }, [isAuthenticated]); + const newNodeId = useRef(uid()); function incrementNodeId() { newNodeId.current = uid(); @@ -112,29 +125,33 @@ export function TabsProvider({ children }: { children: ReactNode }) { } function refreshFlows() { + setIsLoading(true); getTabsDataFromDB().then((DbData) => { if (DbData && Object.keys(templates).length > 0) { try { processDBData(DbData); updateStateWithDbData(DbData); - } catch (e) { - console.error(e); - } + setIsLoading(false); + } catch (e) {} } }); } useEffect(() => { - // get data from db - //get tabs locally saved - // let tabsData = getLocalStorageTabsData(); - refreshFlows(); - }, [templates]); + // If the user is authenticated, fetch the types. This code is important to check if the user is auth because of the execution order of the useEffect hooks. 
+ if (getAuthentication() === true) { + // get data from db + //get tabs locally saved + // let tabsData = getLocalStorageTabsData(); + refreshFlows(); + } + }, [templates, getAuthentication()]); function getTabsDataFromDB() { //get tabs from db return readFlowsFromDatabase(); } + function processDBData(DbData: FlowType[]) { DbData.forEach((flow: FlowType) => { try { @@ -143,9 +160,7 @@ export function TabsProvider({ children }: { children: ReactNode }) { } processFlowEdges(flow); processFlowNodes(flow); - } catch (e) { - console.error(e); - } + } catch (e) {} }); } @@ -223,8 +238,8 @@ export function TabsProvider({ children }: { children: ReactNode }) { function hardReset() { newNodeId.current = uid(); setTabId(""); - setFlows([]); + setIsLoading(true); setId(uid()); } @@ -282,39 +297,40 @@ export function TabsProvider({ children }: { children: ReactNode }) { * If the file type is application/json, the file is read and parsed into a JSON object. * The resulting JSON object is passed to the addFlow function. 
*/ - function uploadFlow(newProject?: boolean, file?: File) { + async function uploadFlow( + newProject?: boolean, + file?: File + ): Promise { + let id; if (file) { - file.text().then((text) => { - // parse the text into a JSON object - let flow: FlowType = JSON.parse(text); + let text = await file.text(); + // parse the text into a JSON object + let flow: FlowType = JSON.parse(text); - addFlow(flow, newProject); - }); + id = await addFlow(flow, newProject); } else { // create a file input const input = document.createElement("input"); input.type = "file"; input.accept = ".json"; // add a change event listener to the file input - input.onchange = (e: Event) => { - // check if the file type is application/json - if ( - (e.target as HTMLInputElement).files![0].type === "application/json" - ) { - // get the file from the file input - const currentfile = (e.target as HTMLInputElement).files![0]; - // read the file as text - currentfile.text().then((text) => { - // parse the text into a JSON object + id = await new Promise((resolve) => { + input.onchange = async (e: Event) => { + if ( + (e.target as HTMLInputElement).files![0].type === "application/json" + ) { + const currentfile = (e.target as HTMLInputElement).files![0]; + let text = await currentfile.text(); let flow: FlowType = JSON.parse(text); - - addFlow(flow, newProject); - }); - } - }; - // trigger the file input click event to open the file dialog - input.click(); + const flowId = await addFlow(flow, newProject); + resolve(flowId); + } + }; + // trigger the file input click event to open the file dialog + input.click(); + }); } + return id; } function uploadFlows() { @@ -478,7 +494,6 @@ export function TabsProvider({ children }: { children: ReactNode }) { return id; } catch (error) { // Handle the error if needed - console.error("Error while adding flow:", error); throw error; // Re-throw the error so the caller can handle it if needed } } else { @@ -573,12 +588,15 @@ export function TabsProvider({ children 
}: { children: ReactNode }) { }); } - async function saveFlow(newFlow: FlowType) { + async function saveFlow(newFlow: FlowType, silent?: boolean) { try { // updates flow in db const updatedFlow = await updateFlowInDatabase(newFlow); if (updatedFlow) { // updates flow in state + if (!silent) { + setSuccessData({ title: "Changes saved successfully" }); + } setFlows((prevState) => { const newFlows = [...prevState]; const index = newFlows.findIndex((flow) => flow.id === newFlow.id); @@ -601,7 +619,10 @@ export function TabsProvider({ children }: { children: ReactNode }) { }); } } catch (err) { - setErrorData(err as errorsVarType); + setErrorData({ + title: "Error while saving changes", + list: [(err as AxiosError).message], + }); } } @@ -634,6 +655,7 @@ export function TabsProvider({ children }: { children: ReactNode }) { paste, getTweak, setTweak, + isLoading, }} > {children} diff --git a/src/frontend/src/contexts/typesContext.tsx b/src/frontend/src/contexts/typesContext.tsx index 52a5eea72..7caf5eccc 100644 --- a/src/frontend/src/contexts/typesContext.tsx +++ b/src/frontend/src/contexts/typesContext.tsx @@ -6,10 +6,11 @@ import { useState, } from "react"; import { Node, ReactFlowInstance } from "reactflow"; -import { getAll } from "../controllers/API"; +import { getAll, getHealth } from "../controllers/API"; import { APIKindType } from "../types/api"; import { typesContextType } from "../types/typesContext"; import { alertContext } from "./alertContext"; +import { AuthContext } from "./authContext"; //context to share types adn functions from nodes to flow @@ -23,6 +24,10 @@ const initialValue: typesContextType = { setTemplates: () => {}, data: {}, setData: () => {}, + setFetchError: () => {}, + fetchError: false, + setFilterEdge: (filter) => {}, + getFilterEdge: [], }; export const typesContext = createContext(initialValue); @@ -33,67 +38,59 @@ export function TypesProvider({ children }: { children: ReactNode }) { useState(null); const [templates, setTemplates] = 
useState({}); const [data, setData] = useState({}); + const [fetchError, setFetchError] = useState(false); const { setLoading } = useContext(alertContext); + const { getAuthentication } = useContext(AuthContext); + const [getFilterEdge, setFilterEdge] = useState([]); useEffect(() => { - let delay = 1000; // Start delay of 1 second - let intervalId: NodeJS.Timer; - let retryCount = 0; // Count of retry attempts - const maxRetryCount = 5; // Max retry attempts + // If the user is authenticated, fetch the types. This code is important to check if the user is auth because of the execution order of the useEffect hooks. + if (getAuthentication() === true) { + getTypes(); + } + }, [getAuthentication()]); + async function getTypes(): Promise { // We will keep a flag to handle the case where the component is unmounted before the API call resolves. let isMounted = true; - - async function getTypes(): Promise { - try { - const result = await getAll(); - // Make sure to only update the state if the component is still mounted. - if (isMounted) { - setLoading(false); - setData(result.data); - setTemplates( - Object.keys(result.data).reduce((acc, curr) => { + try { + const result = await getAll(); + // Make sure to only update the state if the component is still mounted. + if (isMounted && result?.status === 200) { + setLoading(false); + setData(result.data); + setTemplates( + Object.keys(result.data).reduce((acc, curr) => { + Object.keys(result.data[curr]).forEach((c: keyof APIKindType) => { + acc[c] = result.data[curr][c]; + }); + return acc; + }, {}) + ); + // Set the types by reducing over the keys of the result data and updating the accumulator. + setTypes( + // Reverse the keys so the tool world does not overlap + Object.keys(result.data) + .reverse() + .reduce((acc, curr) => { Object.keys(result.data[curr]).forEach((c: keyof APIKindType) => { - acc[c] = result.data[curr][c]; + acc[c] = curr; + // Add the base classes to the accumulator as well. 
+ result.data[curr][c].base_classes?.forEach((b) => { + acc[b] = curr; + }); }); return acc; }, {}) - ); - // Set the types by reducing over the keys of the result data and updating the accumulator. - setTypes( - // Reverse the keys so the tool world does not overlap - Object.keys(result.data) - .reverse() - .reduce((acc, curr) => { - Object.keys(result.data[curr]).forEach( - (c: keyof APIKindType) => { - acc[c] = curr; - // Add the base classes to the accumulator as well. - result.data[curr][c].base_classes?.forEach((b) => { - acc[b] = curr; - }); - } - ); - return acc; - }, {}) - ); - } - // Clear the interval if successful. - clearInterval(intervalId!); - } catch (error) { - console.error("An error has occurred while fetching types."); + ); } + } catch (error) { + console.error("An error has occurred while fetching types."); + await getHealth().catch((e) => { + setFetchError(true); + }); } - - // Start the initial interval. - intervalId = setInterval(getTypes, delay); - return () => { - // This will clear the interval when the component unmounts, or when the dependencies of the useEffect hook change. - clearInterval(intervalId!); - // Indicate that the component has been unmounted. 
- isMounted = false; - }; - }, []); + } function deleteNode(idx: string) { reactFlowInstance!.setNodes( @@ -117,6 +114,10 @@ export function TypesProvider({ children }: { children: ReactNode }) { templates, data, setData, + fetchError, + setFetchError, + setFilterEdge, + getFilterEdge, }} > {children} diff --git a/src/frontend/src/controllers/API/api.tsx b/src/frontend/src/controllers/API/api.tsx index ec0e8f329..75be74fac 100644 --- a/src/frontend/src/controllers/API/api.tsx +++ b/src/frontend/src/controllers/API/api.tsx @@ -1,60 +1,122 @@ import axios, { AxiosError, AxiosInstance } from "axios"; -import { useContext, useEffect, useRef } from "react"; +import { useContext, useEffect } from "react"; +import { Cookies } from "react-cookie"; +import { useNavigate } from "react-router-dom"; +import { renewAccessToken } from "."; import { alertContext } from "../../contexts/alertContext"; +import { AuthContext } from "../../contexts/authContext"; // Create a new Axios instance const api: AxiosInstance = axios.create({ baseURL: "", }); -function ApiInterceptor(): null { - const retryCounts = useRef([]); +function ApiInterceptor() { const { setErrorData } = useContext(alertContext); + let { accessToken, login, logout, authenticationErrorCount } = + useContext(AuthContext); + const navigate = useNavigate(); + const cookies = new Cookies(); useEffect(() => { const interceptor = api.interceptors.response.use( (response) => response, async (error: AxiosError) => { - // if (URL_EXCLUDED_FROM_ERROR_RETRIES.includes(error.config?.url)) { - // return Promise.reject(error); - // } - // let retryCount = 0; - // while (retryCount < 4) { - // await sleep(5000); // Sleep for 5 seconds - // retryCount++; - // try { - // const response = await axios.request(error.config); - // return response; - // } catch (error) { - // if (retryCount === 3) { - // setErrorData({ - // title: "There was an error on web connection, please: ", - // list: [ - // "Refresh the page", - // "Use a new flow 
tab", - // "Check if the backend is up", - // "Endpoint: " + error.config?.url, - // ], - // }); - // return Promise.reject(error); - // } - // } - // } + if (error.response?.status === 401) { + const refreshToken = cookies.get("refresh_tkn_lflw"); + if (refreshToken && refreshToken !== "auto") { + authenticationErrorCount = authenticationErrorCount + 1; + if (authenticationErrorCount > 3) { + authenticationErrorCount = 0; + logout(); + navigate("/login"); + } + + const res = await renewAccessToken(refreshToken); + if (res?.data?.access_token && res?.data?.refresh_token) { + login(res?.data?.access_token, res?.data?.refresh_token); + } + + try { + if (error?.config?.headers) { + delete error.config.headers["Authorization"]; + error.config.headers["Authorization"] = `Bearer ${cookies.get( + "access_tkn_lflw" + )}`; + const response = await axios.request(error.config); + return response; + } + } catch (error) { + if (axios.isAxiosError(error) && error.response?.status === 401) { + logout(); + navigate("/login"); + } + } + } + + if (!refreshToken && error?.config?.url?.includes("login")) { + return Promise.reject(error); + } else { + logout(); + navigate("/login"); + } + } else { + // if (URL_EXCLUDED_FROM_ERROR_RETRIES.includes(error.config?.url)) { + return Promise.reject(error); + // } + } + } + ); + + const isAuthorizedURL = (url) => { + const authorizedDomains = [ + "https://raw.githubusercontent.com/logspace-ai/langflow_examples/main/examples", + "https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples", + "https://api.github.com/repos/logspace-ai/langflow", + "auto_login", + ]; + + const authorizedEndpoints = ["auto_login"]; + + try { + const parsedURL = new URL(url); + + const isDomainAllowed = authorizedDomains.some( + (domain) => parsedURL.origin === new URL(domain).origin + ); + const isEndpointAllowed = authorizedEndpoints.some((endpoint) => + parsedURL.pathname.includes(endpoint) + ); + + return isDomainAllowed || isEndpointAllowed; + 
} catch (e) { + // Invalid URL + return false; + } + }; + + // Request interceptor to add access token to every request + const requestInterceptor = api.interceptors.request.use( + (config) => { + if (accessToken && !isAuthorizedURL(config?.url)) { + config.headers["Authorization"] = `Bearer ${accessToken}`; + } + + return config; + }, + (error) => { + return Promise.reject(error); } ); return () => { - // Clean up the interceptor when the component unmounts + // Clean up the interceptors when the component unmounts api.interceptors.response.eject(interceptor); + api.interceptors.request.eject(requestInterceptor); }; - }, [retryCounts]); + }, [accessToken, setErrorData]); return null; } -// Function to sleep for a given duration in milliseconds -function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - export { ApiInterceptor, api }; diff --git a/src/frontend/src/controllers/API/index.ts b/src/frontend/src/controllers/API/index.ts index be3d4f2e9..80e97a73e 100644 --- a/src/frontend/src/controllers/API/index.ts +++ b/src/frontend/src/controllers/API/index.ts @@ -1,7 +1,16 @@ import { AxiosResponse } from "axios"; import { ReactFlowJsonObject } from "reactflow"; +import { BASE_URL_API } from "../../constants/constants"; import { api } from "../../controllers/API/api"; -import { APIObjectType, sendAllProps } from "../../types/api/index"; +import { + APIObjectType, + LoginType, + Users, + changeUser, + resetPasswordType, + sendAllProps, +} from "../../types/api/index"; +import { UserInputType } from "../../types/components"; import { FlowStyleType, FlowType } from "../../types/flow"; import { APIClassType, @@ -18,7 +27,7 @@ import { * @returns {Promise>} A promise that resolves to an AxiosResponse containing all the objects. 
*/ export async function getAll(): Promise> { - return await api.get(`/api/v1/all`); + return await api.get(`${BASE_URL_API}all`); } const GITHUB_API_URL = "https://api.github.com"; @@ -40,13 +49,13 @@ export async function getRepoStars(owner: string, repo: string) { * @returns {AxiosResponse} The API response. */ export async function sendAll(data: sendAllProps) { - return await api.post(`/api/v1/predict`, data); + return await api.post(`${BASE_URL_API}predict`, data); } export async function postValidateCode( code: string ): Promise> { - return await api.post("/api/v1/validate/code", { code }); + return await api.post(`${BASE_URL_API}validate/code`, { code }); } /** @@ -61,7 +70,7 @@ export async function postValidatePrompt( template: string, frontend_node: APIClassType ): Promise> { - return await api.post("/api/v1/validate/prompt", { + return await api.post(`${BASE_URL_API}validate/prompt`, { name: name, template: template, frontend_node: frontend_node, @@ -105,7 +114,7 @@ export async function saveFlowToDatabase(newFlow: { style?: FlowStyleType; }): Promise { try { - const response = await api.post("/api/v1/flows/", { + const response = await api.post(`${BASE_URL_API}flows/`, { name: newFlow.name, data: newFlow.data, description: newFlow.description, @@ -131,7 +140,7 @@ export async function updateFlowInDatabase( updatedFlow: FlowType ): Promise { try { - const response = await api.patch(`/api/v1/flows/${updatedFlow.id}`, { + const response = await api.patch(`${BASE_URL_API}flows/${updatedFlow.id}`, { name: updatedFlow.name, data: updatedFlow.data, description: updatedFlow.description, @@ -155,9 +164,9 @@ export async function updateFlowInDatabase( */ export async function readFlowsFromDatabase() { try { - const response = await api.get("/api/v1/flows/"); - if (response.status !== 200) { - throw new Error(`HTTP error! 
status: ${response.status}`); + const response = await api.get(`${BASE_URL_API}flows/`); + if (response?.status !== 200) { + throw new Error(`HTTP error! status: ${response?.status}`); } return response.data; } catch (error) { @@ -168,9 +177,9 @@ export async function readFlowsFromDatabase() { export async function downloadFlowsFromDatabase() { try { - const response = await api.get("/api/v1/flows/download/"); - if (response.status !== 200) { - throw new Error(`HTTP error! status: ${response.status}`); + const response = await api.get(`${BASE_URL_API}flows/download/`); + if (response?.status !== 200) { + throw new Error(`HTTP error! status: ${response?.status}`); } return response.data; } catch (error) { @@ -181,10 +190,10 @@ export async function downloadFlowsFromDatabase() { export async function uploadFlowsToDatabase(flows: FormData) { try { - const response = await api.post(`/api/v1/flows/upload/`, flows); + const response = await api.post(`${BASE_URL_API}flows/upload/`, flows); - if (response.status !== 201) { - throw new Error(`HTTP error! status: ${response.status}`); + if (response?.status !== 201) { + throw new Error(`HTTP error! status: ${response?.status}`); } return response.data; } catch (error) { @@ -202,7 +211,7 @@ export async function uploadFlowsToDatabase(flows: FormData) { */ export async function deleteFlowFromDatabase(flowId: string) { try { - const response = await api.delete(`/api/v1/flows/${flowId}`); + const response = await api.delete(`${BASE_URL_API}flows/${flowId}`); if (response.status !== 200) { throw new Error(`HTTP error! status: ${response.status}`); } @@ -222,7 +231,7 @@ export async function deleteFlowFromDatabase(flowId: string) { */ export async function getFlowFromDatabase(flowId: number) { try { - const response = await api.get(`/api/v1/flows/${flowId}`); + const response = await api.get(`${BASE_URL_API}flows/${flowId}`); if (response.status !== 200) { throw new Error(`HTTP error! 
status: ${response.status}`); } @@ -241,7 +250,7 @@ export async function getFlowFromDatabase(flowId: number) { */ export async function getFlowStylesFromDatabase() { try { - const response = await api.get("/api/v1/flow_styles/"); + const response = await api.get(`${BASE_URL_API}flow_styles/`); if (response.status !== 200) { throw new Error(`HTTP error! status: ${response.status}`); } @@ -261,7 +270,7 @@ export async function getFlowStylesFromDatabase() { */ export async function saveFlowStyleToDatabase(flowStyle: FlowStyleType) { try { - const response = await api.post("/api/v1/flow_styles/", flowStyle, { + const response = await api.post(`${BASE_URL_API}flow_styles/`, flowStyle, { headers: { accept: "application/json", "Content-Type": "application/json", @@ -284,7 +293,7 @@ export async function saveFlowStyleToDatabase(flowStyle: FlowStyleType) { * @returns {Promise>} A promise that resolves to an AxiosResponse containing the version information. */ export async function getVersion() { - const respnose = await api.get("/api/v1/version"); + const respnose = await api.get(`${BASE_URL_API}version`); return respnose.data; } @@ -305,8 +314,8 @@ export async function getHealth() { */ export async function getBuildStatus( flowId: string -): Promise { - return await api.get(`/api/v1/build/${flowId}/status`); +): Promise> { + return await api.get(`${BASE_URL_API}build/${flowId}/status`); } //docs for postbuildinit @@ -319,7 +328,7 @@ export async function getBuildStatus( export async function postBuildInit( flow: FlowType ): Promise> { - return await api.post(`/api/v1/build/init/${flow.id}`, flow); + return await api.post(`${BASE_URL_API}build/init/${flow.id}`, flow); } // fetch(`/upload/${id}`, { @@ -337,12 +346,180 @@ export async function uploadFile( ): Promise> { const formData = new FormData(); formData.append("file", file); - return await api.post(`/api/v1/upload/${id}`, formData); + return await api.post(`${BASE_URL_API}upload/${id}`, formData); } export async 
function postCustomComponent( code: string, apiClass: APIClassType ): Promise> { - return await api.post(`/api/v1/custom_component`, { code }); + return await api.post(`${BASE_URL_API}custom_component`, { code }); +} + +export async function onLogin(user: LoginType) { + try { + const response = await api.post( + `${BASE_URL_API}login`, + new URLSearchParams({ + username: user.username, + password: user.password, + }).toString(), + { + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + } + ); + + if (response.status === 200) { + const data = response.data; + return data; + } + } catch (error) { + throw error; + } +} + +export async function autoLogin() { + try { + const response = await api.get(`${BASE_URL_API}auto_login`); + + if (response.status === 200) { + const data = response.data; + return data; + } + } catch (error) { + throw error; + } +} + +export async function renewAccessToken(token: string) { + try { + if (token) { + return await api.post(`${BASE_URL_API}refresh?token=${token}`); + } + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function getLoggedUser(): Promise { + try { + const res = await api.get(`${BASE_URL_API}users/whoami`); + + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } + return null; +} + +export async function addUser(user: UserInputType): Promise> { + try { + const res = await api.post(`${BASE_URL_API}users/`, user); + if (res.status !== 201) { + throw new Error(res.data.detail); + } + return res.data; + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function getUsersPage( + skip: number, + limit: number +): Promise> { + try { + const res = await api.get( + `${BASE_URL_API}users/?skip=${skip}&limit=${limit}` + ); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } + return []; +} + +export 
async function deleteUser(user_id: string) { + try { + const res = await api.delete(`${BASE_URL_API}users/${user_id}`); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function updateUser(user_id: string, user: changeUser) { + try { + const res = await api.patch(`${BASE_URL_API}users/${user_id}`, user); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function resetPassword(user_id: string, user: resetPasswordType) { + try { + const res = await api.patch( + `${BASE_URL_API}users/${user_id}/reset-password`, + user + ); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function getApiKey() { + try { + const res = await api.get(`${BASE_URL_API}api_key/`); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function createApiKey(name: string) { + try { + const res = await api.post(`${BASE_URL_API}api_key/`, { name }); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } +} + +export async function deleteApiKey(api_key: string) { + try { + const res = await api.delete(`${BASE_URL_API}api_key/${api_key}`); + if (res.status === 200) { + return res.data; + } + } catch (error) { + console.log("Error:", error); + throw error; + } } diff --git a/src/frontend/src/icons/Airbyte/airbyte.svg b/src/frontend/src/icons/Airbyte/airbyte.svg index eefa1bceb..a7275179d 100644 --- a/src/frontend/src/icons/Airbyte/airbyte.svg +++ b/src/frontend/src/icons/Airbyte/airbyte.svg @@ -1,22 +1,22 @@ - - - - - - + + + + + + diff --git a/src/frontend/src/icons/GradientSparkles/index.tsx b/src/frontend/src/icons/GradientSparkles/index.tsx index b8f3534d5..6be7e7576 100644 
--- a/src/frontend/src/icons/GradientSparkles/index.tsx +++ b/src/frontend/src/icons/GradientSparkles/index.tsx @@ -1,22 +1,21 @@ -import { Infinity } from "lucide-react"; +import { InfinityIcon } from "lucide-react"; import { forwardRef } from "react"; -const GradientSparkles = forwardRef>( - (props, ref) => { - return ( - <> - - - - - - - - - - - ); - } -); - -export default GradientSparkles; +export const GradientSparkles = forwardRef< + SVGSVGElement, + React.PropsWithChildren<{}> +>((props, ref) => { + return ( + <> + + + + + + + + + + + ); +}); diff --git a/src/frontend/src/index.tsx b/src/frontend/src/index.tsx index 2542f4903..78a57298f 100644 --- a/src/frontend/src/index.tsx +++ b/src/frontend/src/index.tsx @@ -1,10 +1,8 @@ import ReactDOM from "react-dom/client"; -import { BrowserRouter } from "react-router-dom"; import App from "./App"; import ContextWrapper from "./contexts"; import reportWebVitals from "./reportWebVitals"; -import { ApiInterceptor } from "./controllers/API/api"; // @ts-ignore import "./style/index.css"; // @ts-ignore @@ -17,10 +15,7 @@ const root = ReactDOM.createRoot( ); root.render( - - - - + ); reportWebVitals(); diff --git a/src/frontend/src/modals/ApiModal/index.tsx b/src/frontend/src/modals/ApiModal/index.tsx index ac3ccf3b7..446562f66 100644 --- a/src/frontend/src/modals/ApiModal/index.tsx +++ b/src/frontend/src/modals/ApiModal/index.tsx @@ -14,11 +14,12 @@ import { import CodeTabsComponent from "../../components/codeTabsComponent"; import IconComponent from "../../components/genericIconComponent"; import { EXPORT_CODE_DIALOG } from "../../constants/constants"; +import { AuthContext } from "../../contexts/authContext"; import { TabsContext } from "../../contexts/tabsContext"; import { TemplateVariableType } from "../../types/api"; import { tweakType, uniqueTweakType } from "../../types/components"; import { FlowType, NodeType } from "../../types/flow/index"; -import { buildTweaks } from "../../utils/reactflowUtils"; +import { 
buildTweaks, convertArrayToObj } from "../../utils/reactflowUtils"; import { getCurlCode, getPythonApiCode, @@ -33,23 +34,27 @@ const ApiModal = forwardRef( { flow, children, - disable, }: { flow: FlowType; children: ReactNode; - disable: boolean; }, ref ) => { + const { autoLogin } = useContext(AuthContext); const [open, setOpen] = useState(false); const [activeTab, setActiveTab] = useState("0"); const tweak = useRef([]); const tweaksList = useRef([]); const { setTweak, getTweak, tabsState } = useContext(TabsContext); - const pythonApiCode = getPythonApiCode(flow, tweak.current, tabsState); - const curl_code = getCurlCode(flow, tweak.current, tabsState); + const pythonApiCode = getPythonApiCode( + flow, + autoLogin, + tweak.current, + tabsState + ); + const curl_code = getCurlCode(flow, autoLogin, tweak.current, tabsState); const pythonCode = getPythonCode(flow, tweak.current, tabsState); - const widgetCode = getWidgetCode(flow, tabsState); + const widgetCode = getWidgetCode(flow, autoLogin, tabsState); const tweaksCode = buildTweaks(flow); const codesArray = [ curl_code, @@ -100,7 +105,9 @@ const ApiModal = forwardRef( node.data.node.template[templateField].type === "code" || node.data.node.template[templateField].type === "prompt" || node.data.node.template[templateField].type === "file" || - node.data.node.template[templateField].type === "int") + node.data.node.template[templateField].type === "int" || + node.data.node.template[templateField].type === "dict" || + node.data.node.template[templateField].type === "NestedDict") ) .map((n, i) => { arrNodesWithValues.push(node["id"]); @@ -113,7 +120,7 @@ const ApiModal = forwardRef( } function buildTweakObject( tw: string, - changes: string | string[] | boolean | number, + changes: string | string[] | boolean | number | Object[] | Object, template: TemplateVariableType ) { if (typeof changes === "string" && template.type === "float") { @@ -126,6 +133,14 @@ const ApiModal = forwardRef( changes = changes?.filter((x) 
=> x !== ""); } + if (template.type === "dict" && Array.isArray(changes)) { + changes = convertArrayToObj(changes); + } + + if (template.type === "NestedDict") { + changes = JSON.stringify(changes); + } + const existingTweak = tweak.current.find((element) => element.hasOwnProperty(tw) ); @@ -152,10 +167,15 @@ const ApiModal = forwardRef( tweak.current.push(newTweak); } - const pythonApiCode = getPythonApiCode(flow, tweak.current, tabsState); - const curl_code = getCurlCode(flow, tweak.current, tabsState); + const pythonApiCode = getPythonApiCode( + flow, + autoLogin, + tweak.current, + tabsState + ); + const curl_code = getCurlCode(flow, autoLogin, tweak.current, tabsState); const pythonCode = getPythonCode(flow, tweak.current, tabsState); - const widgetCode = getWidgetCode(flow, tabsState); + const widgetCode = getWidgetCode(flow, autoLogin, tabsState); tabs![0].code = curl_code; tabs![1].code = pythonApiCode; @@ -201,8 +221,8 @@ const ApiModal = forwardRef( } return ( - - {children} + + {children} Code - {children} + {children} {title} void; nodeLength: number; children: ReactNode; + open?: boolean; + onClose?: (close: boolean) => void; }, ref ) => { - const [modalOpen, setModalOpen] = useState(false); - const [myData, setMyData] = useState(data); + const [modalOpen, setModalOpen] = useState(open ?? false); + + const myData = useRef(data); + const { setTabsState, tabId } = useContext(TabsContext); const { reactFlowInstance } = useContext(typesContext); - let disabled = reactFlowInstance ?.getEdges() .some((edge) => edge.targetHandle === data.id) ?? 
false; - function changeAdvanced(templateParam: string): void { - setMyData((old) => { - let newData = cloneDeep(old); - newData.node!.template[templateParam].advanced = - !newData.node!.template[templateParam].advanced; - return newData; - }); + function changeAdvanced(n) { + myData.current.node!.template[n].advanced = + !myData.current.node!.template[n].advanced; } - const handleOnNewValue = ( - newValue: string | string[] | boolean, - name: string - ) => { - setMyData((old) => { - let newData = cloneDeep(old); - newData.node!.template[name].value = newValue; - return newData; - }); + const handleOnNewValue = (newValue: any, name) => { + myData.current.node!.template[name].value = newValue; }; useEffect(() => { - setMyData(data); // reset data to what it is on node when opening modal + myData.current = data; // reset data to what it is on node when opening modal + onClose!(modalOpen); }, [modalOpen]); + const [errorDuplicateKey, setErrorDuplicateKey] = useState(false); + return ( - + { + myData.current = data; + }} + > {children} - - {myData.type} - ID: {myData.id} + + {myData.current.type} + ID: {myData.current.id}
@@ -116,64 +134,76 @@ const EditNodeModal = forwardRef( - {Object.keys(myData.node!.template) + {Object.keys(myData.current.node!.template) .filter( (templateParam) => templateParam.charAt(0) !== "_" && - myData.node?.template[templateParam].show && - (myData.node.template[templateParam].type === - "str" || - myData.node.template[templateParam].type === - "bool" || - myData.node.template[templateParam].type === - "float" || - myData.node.template[templateParam].type === - "code" || - myData.node.template[templateParam].type === - "prompt" || - myData.node.template[templateParam].type === - "file" || - myData.node.template[templateParam].type === - "int") + myData.current.node?.template[templateParam].show && + (myData.current.node.template[templateParam] + .type === "str" || + myData.current.node.template[templateParam] + .type === "bool" || + myData.current.node.template[templateParam] + .type === "float" || + myData.current.node.template[templateParam] + .type === "code" || + myData.current.node.template[templateParam] + .type === "prompt" || + myData.current.node.template[templateParam] + .type === "file" || + myData.current.node.template[templateParam] + .type === "int" || + myData.current.node.template[templateParam] + .type === "dict" || + myData.current.node.template[templateParam] + .type === "NestedDict") ) .map((templateParam, index) => ( - {myData.node?.template[templateParam].name - ? myData.node.template[templateParam].name - : myData.node?.template[templateParam] + {myData.current.node?.template[templateParam].name + ? myData.current.node.template[templateParam] + .name + : myData.current.node?.template[templateParam] .display_name} - {myData.node?.template[templateParam].type === - "str" && - !myData.node.template[templateParam].options ? ( + {myData.current.node?.template[templateParam] + .type === "str" && + !myData.current.node.template[templateParam] + .options ? (
- {myData.node.template[templateParam].list ? ( + {myData.current.node.template[templateParam] + .list ? ( { handleOnNewValue(value, templateParam); }} /> - ) : myData.node.template[templateParam] - .multiline ? ( + ) : myData.current.node.template[ + templateParam + ].multiline ? ( { handleOnNewValue(value, templateParam); @@ -184,12 +214,14 @@ const EditNodeModal = forwardRef( editNode={true} disabled={disabled} password={ - myData.node.template[templateParam] - .password ?? false + myData.current.node.template[ + templateParam + ].password ?? false } value={ - myData.node.template[templateParam] - .value ?? "" + myData.current.node.template[ + templateParam + ].value ?? "" } onChange={(value) => { handleOnNewValue(value, templateParam); @@ -197,14 +229,78 @@ const EditNodeModal = forwardRef( /> )}
- ) : myData.node?.template[templateParam].type === - "bool" ? ( + ) : myData.current.node?.template[templateParam] + .type === "NestedDict" ? ( +
+ { + myData.current.node!.template[ + templateParam + ].value = newValue; + handleOnNewValue(newValue, templateParam); + }} + /> +
+ ) : myData.current.node?.template[templateParam] + .type === "dict" ? ( +
+ { + const valueToNumbers = + convertValuesToNumbers(newValue); + myData.current.node!.template[ + templateParam + ].value = valueToNumbers; + setErrorDuplicateKey( + hasDuplicateKeys(valueToNumbers) + ); + handleOnNewValue( + valueToNumbers, + templateParam + ); + }} + /> +
+ ) : myData.current.node?.template[templateParam] + .type === "bool" ? (
{" "} { handleOnNewValue( @@ -215,76 +311,84 @@ const EditNodeModal = forwardRef( size="small" />
- ) : myData.node?.template[templateParam].type === - "float" ? ( + ) : myData.current.node?.template[templateParam] + .type === "float" ? (
{ handleOnNewValue(value, templateParam); }} />
- ) : myData.node?.template[templateParam].type === - "str" && - myData.node.template[templateParam].options ? ( + ) : myData.current.node?.template[templateParam] + .type === "str" && + myData.current.node.template[templateParam] + .options ? (
handleOnNewValue(value, templateParam) } value={ - myData.node.template[templateParam] - .value ?? "Choose an option" + myData.current.node.template[ + templateParam + ].value ?? "Choose an option" } >
- ) : myData.node?.template[templateParam].type === - "int" ? ( + ) : myData.current.node?.template[templateParam] + .type === "int" ? (
{ handleOnNewValue(value, templateParam); }} />
- ) : myData.node?.template[templateParam].type === - "file" ? ( + ) : myData.current.node?.template[templateParam] + .type === "file" ? (
{ handleOnNewValue(value, templateParam); }} fileTypes={ - myData.node.template[templateParam] - .fileTypes + myData.current.node.template[ + templateParam + ].fileTypes } suffixes={ - myData.node.template[templateParam] - .suffixes + myData.current.node.template[ + templateParam + ].suffixes } onFileChange={(filePath: string) => { data.node!.template[ @@ -293,28 +397,29 @@ const EditNodeModal = forwardRef( }} >
- ) : myData.node?.template[templateParam].type === - "prompt" ? ( + ) : myData.current.node?.template[templateParam] + .type === "prompt" ? (
{ - myData.node = nodeClass; + myData.current.node = nodeClass; }} value={ - myData.node.template[templateParam] - .value ?? "" + myData.current.node.template[ + templateParam + ].value ?? "" } onChange={(value: string | string[]) => { handleOnNewValue(value, templateParam); }} />
- ) : myData.node?.template[templateParam].type === - "code" ? ( + ) : myData.current.node?.template[templateParam] + .type === "code" ? (
{ handleOnNewValue(value, templateParam); }} />
- ) : myData.node?.template[templateParam].type === - "Any" ? ( + ) : myData.current.node?.template[templateParam] + .type === "Any" ? ( "-" ) : (
@@ -347,12 +453,13 @@ const EditNodeModal = forwardRef(
- changeAdvanced(templateParam) + !myData.current.node?.template[ + templateParam + ].advanced } + setEnabled={(e) => { + changeAdvanced(templateParam); + }} disabled={disabled} size="small" /> @@ -372,7 +479,8 @@ const EditNodeModal = forwardRef( +
+
+ + )} + + { + setRenderKey(true); + handleAddNewKey(); + event.preventDefault(); + }} + > + {renderKey === false && ( +
+ +
+ + Name (optional){" "} + +
+ + { + handleInput({ target: { name: "apikeyname", value } }); + setApiKeyName(value); + }} + value={apiKeyName} + className="primary-input" + placeholder="My key name" + /> + +
+
+ )} + {renderKey === false && ( +
+ + + + + +
+ )} + + {renderKey === true && ( +
+ +
+ )} +
+
+
+ ); +} diff --git a/src/frontend/src/modals/UserManagementModal/index.tsx b/src/frontend/src/modals/UserManagementModal/index.tsx index b0800decf..d246a11b1 100644 --- a/src/frontend/src/modals/UserManagementModal/index.tsx +++ b/src/frontend/src/modals/UserManagementModal/index.tsx @@ -1,8 +1,15 @@ import * as Form from "@radix-ui/react-form"; -import { useEffect, useState } from "react"; -import InputComponent from "../../components/inputComponent"; +import { Eye, EyeOff } from "lucide-react"; +import { useContext, useEffect, useState } from "react"; import { Button } from "../../components/ui/button"; -import { UserManagementType } from "../../types/components"; +import { Checkbox } from "../../components/ui/checkbox"; +import { CONTROL_NEW_USER } from "../../constants/constants"; +import { AuthContext } from "../../contexts/authContext"; +import { + UserInputType, + UserManagementType, + inputHandlerEventType, +} from "../../types/components"; import { nodeIconsLucide } from "../../utils/styleUtils"; import BaseModal from "../baseModal"; @@ -16,20 +23,35 @@ export default function UserManagementModal({ data, index, onConfirm, + asChild, }: UserManagementType) { const Icon: any = nodeIconsLucide[icon]; - + const [pwdVisible, setPwdVisible] = useState(false); + const [confirmPwdVisible, setConfirmPwdVisible] = useState(false); const [open, setOpen] = useState(false); - const [password, setPassword] = useState(data?.password ?? ""); - const [username, setUserName] = useState(data?.user ?? ""); + const [username, setUserName] = useState(data?.username ?? ""); const [confirmPassword, setConfirmPassword] = useState(data?.password ?? ""); + const [isActive, setIsActive] = useState(data?.is_active ?? false); + const [isSuperUser, setIsSuperUser] = useState(data?.is_superuser ?? 
false); + const [inputState, setInputState] = useState(CONTROL_NEW_USER); + const { userData } = useContext(AuthContext); + + function handleInput({ + target: { name, value }, + }: inputHandlerEventType): void { + setInputState((prev) => ({ ...prev, [name]: value })); + } useEffect(() => { if (!data) { resetForm(); + } else { + handleInput({ target: { name: "username", value: username } }); + handleInput({ target: { name: "is_active", value: isActive } }); + handleInput({ target: { name: "is_superuser", value: isSuperUser } }); } - }, [data, open]); + }, [open]); function resetForm() { setPassword(""); @@ -39,7 +61,7 @@ export default function UserManagementModal({ return ( - {children} + {children} {title} { - setUserName(input.target.value); + onChange={({ target: { value } }) => { + handleInput({ target: { name: "username", value } }); + setUserName(value); }} value={username} className="primary-input" @@ -106,22 +127,40 @@ export default function UserManagementModal({ justifyContent: "space-between", }} > - + Password{" "} - * + + * + + {pwdVisible && ( + setPwdVisible(!pwdVisible)} + className="h-5 cursor-pointer" + strokeWidth={1.5} + /> + )} + {!pwdVisible && ( + setPwdVisible(!pwdVisible)} + className="h-5 cursor-pointer" + strokeWidth={1.5} + /> + )}
- { - setPassword(input); - }} - value={password} - password={true} - isForm - className="primary-input" - required - placeholder="Password" - /> + + { + handleInput({ target: { name: "password", value } }); + setPassword(value); + }} + value={password} + className="primary-input" + required={data ? false : true} + type={pwdVisible ? "text" : "password"} + /> + + Please enter a password @@ -146,93 +185,108 @@ export default function UserManagementModal({ justifyContent: "space-between", }} > - + Confirm password{" "} - * + + * + + {confirmPwdVisible && ( + + setConfirmPwdVisible(!confirmPwdVisible) + } + className="h-5 cursor-pointer" + strokeWidth={1.5} + /> + )} + {!confirmPwdVisible && ( + + setConfirmPwdVisible(!confirmPwdVisible) + } + className="h-5 cursor-pointer" + strokeWidth={1.5} + /> + )}
- { - setConfirmPassword(input); - }} - value={confirmPassword} - password={true} - isForm - className="primary-input" - required - placeholder="Confirm your password" - /> + + { + setConfirmPassword(input.target.value); + }} + value={confirmPassword} + className="primary-input" + required={data ? false : true} + type={confirmPwdVisible ? "text" : "password"} + /> + Please confirm your password
- - {/* - -
- - Email * - - - Please enter your email - - - Please provide a valid email - -
- - - -
*/} - - {/* - -
- - Date of birth{" "} - * - - - Please enter your date of birth - -
- - - -
*/} +
+ +
+ + Active + + + { + handleInput({ target: { name: "is_active", value } }); + setIsActive(value); + }} + /> + +
+
+ {userData?.is_superuser && ( + +
+ + Superuser + + + { + handleInput({ + target: { name: "is_superuser", value }, + }); + setIsSuperUser(value); + }} + /> + +
+
+ )} +
- - - + + + +
diff --git a/src/frontend/src/modals/baseModal/index.tsx b/src/frontend/src/modals/baseModal/index.tsx index 5dd6a4f8d..263f335fd 100644 --- a/src/frontend/src/modals/baseModal/index.tsx +++ b/src/frontend/src/modals/baseModal/index.tsx @@ -1,4 +1,4 @@ -import { ReactNode } from "react"; +import { ReactNode, useEffect } from "react"; import React from "react"; import { @@ -14,13 +14,25 @@ import { modalHeaderType } from "../../types/components"; type ContentProps = { children: ReactNode }; type HeaderProps = { children: ReactNode; description: string }; type FooterProps = { children: ReactNode }; -type TriggerProps = { children: ReactNode }; +type TriggerProps = { + children: ReactNode; + asChild?: boolean; + disable?: boolean; +}; const Content: React.FC = ({ children }) => { return
{children}
; }; -const Trigger: React.FC = ({ children }) => { - return <>{children}; +const Trigger: React.FC = ({ children, asChild, disable }) => { + return ( + + ); }; const Header: React.FC<{ children: ReactNode; description: string | null }> = ({ @@ -47,7 +59,6 @@ interface BaseModalProps { ]; open?: boolean; setOpen?: (open: boolean) => void; - disable?: boolean; size?: | "x-small" | "smaller" @@ -57,13 +68,16 @@ interface BaseModalProps { | "large-h-full" | "small-h-full" | "medium-h-full"; + + disable?: boolean; + onChangeOpenModal?: (open?: boolean) => void; } function BaseModal({ open, setOpen, - disable = false, children, size = "large", + onChangeOpenModal, }: BaseModalProps) { const headerChild = React.Children.toArray(children).find( (child) => (child as React.ReactElement).type === Header @@ -117,20 +131,21 @@ function BaseModal({ break; } + useEffect(() => { + if (onChangeOpenModal) { + onChangeOpenModal(open); + } + }, [open]); + //UPDATE COLORS AND STYLE CLASSSES return ( - + {triggerChild}
{headerChild}
-
+
{ContentChild}
{ContentFooter && ( diff --git a/src/frontend/src/modals/codeAreaModal/index.tsx b/src/frontend/src/modals/codeAreaModal/index.tsx index 238641279..903fb85b5 100644 --- a/src/frontend/src/modals/codeAreaModal/index.tsx +++ b/src/frontend/src/modals/codeAreaModal/index.tsx @@ -39,7 +39,6 @@ export default function CodeAreaModal({ if (dynamic && Object.keys(nodeClass!.template).length > 2) { return; } - processCode(); }, []); function processNonDynamicField() { @@ -165,18 +164,18 @@ export default function CodeAreaModal({
-
+

{error?.detail?.error}

-
+                
                   {error?.detail?.traceback}
-                
+
diff --git a/src/frontend/src/modals/dictAreaModal/index.tsx b/src/frontend/src/modals/dictAreaModal/index.tsx new file mode 100644 index 000000000..fc867cccf --- /dev/null +++ b/src/frontend/src/modals/dictAreaModal/index.tsx @@ -0,0 +1,71 @@ +import "ace-builds/src-noconflict/ace"; +import "ace-builds/src-noconflict/ext-language_tools"; +import "ace-builds/src-noconflict/mode-python"; +import "ace-builds/src-noconflict/theme-github"; +import "ace-builds/src-noconflict/theme-twilight"; +// import "ace-builds/webpack-resolver"; +import { useEffect, useRef, useState } from "react"; +import JsonView from "react18-json-view"; +import "react18-json-view/src/dark.css"; +import "react18-json-view/src/style.css"; +import IconComponent from "../../components/genericIconComponent"; +import { Button } from "../../components/ui/button"; +import { CODE_DICT_DIALOG_SUBTITLE } from "../../constants/constants"; +import BaseModal from "../baseModal"; + +export default function DictAreaModal({ + children, + onChange, + value, +}): JSX.Element { + const [open, setOpen] = useState(false); + + const ref = useRef(value); + + useEffect(() => { + if (value) ref.current = value; + }, [ref]); + + return ( + + {children} + + Edit Dictionary + + +
+ { + ref.current = edit["src"]; + }} + onChange={(edit) => { + ref.current = edit["src"]; + }} + src={ref.current} + /> +
+ +
+
+
+
+ ); +} diff --git a/src/frontend/src/modals/exportModal/index.tsx b/src/frontend/src/modals/exportModal/index.tsx index c296a1e2c..01b655b39 100644 --- a/src/frontend/src/modals/exportModal/index.tsx +++ b/src/frontend/src/modals/exportModal/index.tsx @@ -10,15 +10,15 @@ import BaseModal from "../baseModal"; const ExportModal = forwardRef( (props: { children: ReactNode }, ref): JSX.Element => { - const { flows, tabId, updateFlow, downloadFlow } = useContext(TabsContext); + const { flows, tabId, downloadFlow } = useContext(TabsContext); const [checked, setChecked] = useState(false); const flow = flows.find((f) => f.id === tabId); useEffect(() => { - setName(flow.name); - setDescription(flow.description); - }, [flow.name, flow.description]); - const [name, setName] = useState(flow.name); - const [description, setDescription] = useState(flow.description); + setName(flow!.name); + setDescription(flow!.description); + }, [flow!.name, flow!.description]); + const [name, setName] = useState(flow!.name); + const [description, setDescription] = useState(flow!.description); const [open, setOpen] = useState(false); return ( @@ -40,7 +40,6 @@ const ExportModal = forwardRef( tabId={tabId} setName={setName} setDescription={setDescription} - updateFlow={updateFlow} />
f.id === tabId); useEffect(() => { - setName(flow.name); - setDescription(flow.description); - }, [flow.name, flow.description]); - const [name, setName] = useState(flow.name); - const [description, setDescription] = useState(flow.description); + setName(flow!.name); + setDescription(flow!.description); + }, [flow!.name, flow!.description]); + const [name, setName] = useState(flow!.name); + const [description, setDescription] = useState(flow!.description); const [invalidName, setInvalidName] = useState(false); function handleClick(): void { @@ -28,7 +26,6 @@ export default function FlowSettingsModal({ savedFlow!.name = name; savedFlow!.description = description; saveFlow(savedFlow!); - setSuccessData({ title: "Changes saved successfully" }); setOpen(false); } return ( diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx index acca662d3..89c7919af 100644 --- a/src/frontend/src/modals/formModal/index.tsx +++ b/src/frontend/src/modals/formModal/index.tsx @@ -23,7 +23,9 @@ import { } from "../../components/ui/dialog"; import { Textarea } from "../../components/ui/textarea"; import { CHAT_FORM_DIALOG_SUBTITLE } from "../../constants/constants"; +import { AuthContext } from "../../contexts/authContext"; import { TabsContext } from "../../contexts/tabsContext"; +import { getBuildStatus } from "../../controllers/API"; import { TabsState } from "../../types/tabs"; import { validateNodes } from "../../utils/reactflowUtils"; @@ -60,6 +62,7 @@ export default function FormModal({ const [chatHistory, setChatHistory] = useState([]); const { reactFlowInstance } = useContext(typesContext); + const { accessToken } = useContext(AuthContext); const { setErrorData } = useContext(alertContext); const ws = useRef(null); const [lockChat, setLockChat] = useState(false); @@ -153,14 +156,28 @@ export default function FormModal({ function handleOnClose(event: CloseEvent): void { if (isOpen.current) { + getBuildStatus(flow.id) + 
.then((response) => { + if (response.data.built) { + connectWS(); + } else { + setErrorData({ + title: "Please build the flow again before using the chat.", + }); + } + }) + .catch((error) => { + setErrorData({ + title: error.data?.detail ? error.data.detail : error.message, + }); + }); setErrorData({ title: event.reason }); setTimeout(() => { - connectWS(); setLockChat(false); }, 1000); } } - + //TODO improve check of user authentication function getWebSocketUrl( chatId: string, isDevelopment: boolean = false @@ -173,11 +190,12 @@ export default function FormModal({ return `${ isDevelopment ? "ws" : webSocketProtocol - }://${host}${chatEndpoint}`; + }://${host}${chatEndpoint}?token=${encodeURIComponent(accessToken!)}`; } function handleWsMessage(data: any) { - if (Array.isArray(data)) { + console.log(data); + if (Array.isArray(data) && data.length > 0) { //set chat history setChatHistory((_) => { let newChatHistory: ChatMessageType[] = []; @@ -258,7 +276,6 @@ export default function FormModal({ }; newWs.onmessage = (event) => { const data = JSON.parse(event.data); - console.log("Received data:", data); handleWsMessage(data); //get chat history }; @@ -266,7 +283,6 @@ export default function FormModal({ handleOnClose(event); }; newWs.onerror = (ev) => { - console.log(ev, "error"); if (flow.id === "") { connectWS(); } else { @@ -292,14 +308,13 @@ export default function FormModal({ useEffect(() => { connectWS(); return () => { - console.log("unmount"); console.log(ws); if (ws.current) { ws.current.close(); } }; // do not add connectWS on dependencies array - }, []); + }, [open]); useEffect(() => { if ( @@ -341,7 +356,10 @@ export default function FormModal({ }, [open]); function sendMessage(): void { - let nodeValidationErrors = validateNodes(reactFlowInstance!); + let nodeValidationErrors = validateNodes( + reactFlowInstance!.getNodes(), + reactFlowInstance!.getEdges() + ); if (nodeValidationErrors.length === 0) { setLockChat(true); let inputs = 
tabsState[id.current].formKeysData.input_keys; diff --git a/src/frontend/src/modals/genericModal/index.tsx b/src/frontend/src/modals/genericModal/index.tsx index 9498bc941..baebd67ba 100644 --- a/src/frontend/src/modals/genericModal/index.tsx +++ b/src/frontend/src/modals/genericModal/index.tsx @@ -121,21 +121,33 @@ export default function GenericModal({ } function validatePrompt(closeModal: boolean): void { + //nodeClass is always null on tweaks + postValidatePrompt(field_name, inputValue, nodeClass!) .then((apiReturn) => { if (apiReturn.data) { + setValue(inputValue); + apiReturn.data.frontend_node["template"]["template"]["value"] = + inputValue; + setNodeClass!(apiReturn?.data?.frontend_node); + let inputVariables = apiReturn.data.input_variables ?? []; if (inputVariables && inputVariables.length === 0) { setIsEdit(true); setNoticeData({ title: "Your template does not have any variables.", }); + setModalOpen(false); } else { setIsEdit(false); setSuccessData({ title: "Prompt is ready", }); - setNodeClass!(apiReturn.data?.frontend_node); + if ( + JSON.stringify(apiReturn.data?.frontend_node) !== + JSON.stringify({}) + ) + setNodeClass!(apiReturn.data?.frontend_node); setModalOpen(closeModal); setValue(inputValue); } @@ -147,7 +159,6 @@ export default function GenericModal({ } }) .catch((error) => { - console.log(error); setIsEdit(true); return setErrorData({ title: "There is something wrong with this prompt, please review it", @@ -159,7 +170,11 @@ export default function GenericModal({ const [modalOpen, setModalOpen] = useState(false); return ( - + {}} + open={modalOpen} + setOpen={setModalOpen} + > {children} { @@ -286,9 +301,7 @@ export default function GenericModal({ setModalOpen(false); break; case TypeModal.PROMPT: - !inputValue || inputValue === "" - ? 
setModalOpen(false) - : validatePrompt(false); + validatePrompt(false); break; default: diff --git a/src/frontend/src/pages/AdminPage/LoginPage/index.tsx b/src/frontend/src/pages/AdminPage/LoginPage/index.tsx index 74cc75d07..e0ceaad49 100644 --- a/src/frontend/src/pages/AdminPage/LoginPage/index.tsx +++ b/src/frontend/src/pages/AdminPage/LoginPage/index.tsx @@ -1,12 +1,62 @@ +import { useContext, useState } from "react"; import { useNavigate } from "react-router-dom"; import { Button } from "../../../components/ui/button"; import { Input } from "../../../components/ui/input"; +import { CONTROL_LOGIN_STATE } from "../../../constants/constants"; +import { alertContext } from "../../../contexts/alertContext"; +import { AuthContext } from "../../../contexts/authContext"; +import { getLoggedUser, onLogin } from "../../../controllers/API"; +import { LoginType } from "../../../types/api"; +import { + inputHandlerEventType, + loginInputStateType, +} from "../../../types/components"; export default function LoginAdminPage() { const navigate = useNavigate(); - function loginAdmin() { - navigate("/admin/"); + const [inputState, setInputState] = + useState(CONTROL_LOGIN_STATE); + const { login, getAuthentication, setUserData } = useContext(AuthContext); + + const { password, username } = inputState; + const { setErrorData } = useContext(alertContext); + + function handleInput({ + target: { name, value }, + }: inputHandlerEventType): void { + setInputState((prev) => ({ ...prev, [name]: value })); + } + + function signIn() { + const user: LoginType = { + username: username, + password: password, + }; + onLogin(user) + .then((user) => { + login(user.access_token, user.refresh_token); + getUser(); + navigate("/admin/"); + }) + .catch((error) => { + setErrorData({ + title: "Error signing in", + list: [error["response"]["data"]["detail"]], + }); + }); + } + + function getUser() { + if (getAuthentication()) { + setTimeout(() => { + getLoggedUser() + .then((user) => { + 
setUserData(user); + }) + .catch((error) => {}); + }, 1000); + } } return ( @@ -14,11 +64,24 @@ export default function LoginAdminPage() {
โ›“๏ธ Admin - - + { + handleInput({ target: { name: "username", value } }); + }} + className="bg-background" + placeholder="Username" + /> + { + handleInput({ target: { name: "password", value } }); + }} + className="bg-background" + placeholder="Password" + /> - )} -
-
- { - handleNewUser(user); - }} - > - - -
-
-
- - - - User - Password - - - - - {filterUserList.map((user, index) => ( - - - {user.user} - - - {user.password} - - -
- { - handleEditUser(index, user); - }} - > - - - - - - { - handleDeleteUser(index); - }} - > - - - - -
-
-
- ))} -
-
-
- { - handleChangePagination(pageSize, pageIndex); - }} - > - )}
+
+ { + handleNewUser(user); + }} + asChild + > + + +
+ {loadingUsers ? ( +
+ +
+ ) : userList.current.length === 0 ? ( + <> +
+ No users registered. +
+ + ) : ( + <> +
+ + + + Id + Username + Active + Superuser + Created At + Updated At + + + + {!loadingUsers && ( + + {filterUserList.map((user: UserInputType, index) => ( + + + + {user.id} + + + + + + {user.username} + + + + + { + handleDisableUser( + user.is_active, + user.id, + user + ); + }} + > +
+ +
+
+
+ + { + handleSuperUserEdit( + user.is_superuser, + user.id, + user + ); + }} + > +
+ +
+
+
+ + { + new Date(user.create_at!) + .toISOString() + .split("T")[0] + } + + + { + new Date(user.updated_at!) + .toISOString() + .split("T")[0] + } + + +
+ { + handleEditUser(user.id, editUser); + }} + > + + + + + + { + handleDeleteUser(user); + }} + > + + + + +
+
+
+ ))} +
+ )} +
+
+ + { + handleChangePagination(pageIndex, pageSize); + }} + > + + )} - + )} ); } diff --git a/src/frontend/src/pages/ApiKeysPage/index.tsx b/src/frontend/src/pages/ApiKeysPage/index.tsx new file mode 100644 index 000000000..d1c91593a --- /dev/null +++ b/src/frontend/src/pages/ApiKeysPage/index.tsx @@ -0,0 +1,283 @@ +import { useContext, useEffect, useRef, useState } from "react"; +import ShadTooltip from "../../components/ShadTooltipComponent"; +import IconComponent from "../../components/genericIconComponent"; +import { Button } from "../../components/ui/button"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "../../components/ui/table"; +import { alertContext } from "../../contexts/alertContext"; +import { AuthContext } from "../../contexts/authContext"; +import { deleteApiKey, getApiKey } from "../../controllers/API"; +import ConfirmationModal from "../../modals/ConfirmationModal"; +import SecretKeyModal from "../../modals/SecretKeyModal"; + +import moment from "moment"; +import Header from "../../components/headerComponent"; +import { + API_PAGE_PARAGRAPH_1, + API_PAGE_PARAGRAPH_2, + API_PAGE_USER_KEYS, + LAST_USED_SPAN_1, + LAST_USED_SPAN_2, +} from "../../constants/constants"; +import { ApiKey } from "../../types/components"; + +export default function ApiKeysPage() { + const [loadingKeys, setLoadingKeys] = useState(true); + const { setErrorData, setSuccessData } = useContext(alertContext); + const { userData } = useContext(AuthContext); + const [userId, setUserId] = useState(""); + const keysList = useRef([]); + + useEffect(() => { + getKeys(); + }, [userData]); + + function getKeys() { + setLoadingKeys(true); + if (userData) { + getApiKey() + .then((keys: [ApiKey]) => { + keysList.current = keys["api_keys"]; + setUserId(keys["user_id"]); + setLoadingKeys(false); + }) + .catch((error) => { + setLoadingKeys(false); + }); + } + } + + function resetFilter() { + getKeys(); + } + + function handleDeleteKey(keys) { + 
deleteApiKey(keys) + .then((res) => { + resetFilter(); + setSuccessData({ + title: "Success! Key deleted!", + }); + }) + .catch((error) => { + setErrorData({ + title: "Error on delete key", + list: [error["response"]["data"]["detail"]], + }); + }); + } + + function lastUsedMessage() { + return ( +
+ + {LAST_USED_SPAN_1} +

{LAST_USED_SPAN_2} +
+
+ ); + } + + return ( + <> +
+ {userData && ( +
+
+
+
+
+
+

+ API keys +

+

+ {API_PAGE_PARAGRAPH_1} +
+ {API_PAGE_PARAGRAPH_2} +

+
+
+
+ + {keysList.current && + keysList.current.length === 0 && + !loadingKeys && ( + <> +
+

{API_PAGE_USER_KEYS}

+
+ + )} + <> + {loadingKeys && ( +
+ Loading... +
+ )} +
+ {keysList.current && + keysList.current.length > 0 && + !loadingKeys && ( + + + + Name + Key + Created + + Last Used + +
+ +
+
+
+ Total Uses + +
+
+ {!loadingKeys && ( + + {keysList.current.map( + (api_keys: ApiKey, index: number) => ( + + + + + {api_keys.name ? api_keys.name : "-"} + + + + + + {api_keys.api_key} + + + + +
+ {moment(api_keys.created_at).format( + "YYYY-MM-DD HH:mm" + )} +
+
+
+ + +
+ {moment(api_keys.last_used_at).format( + "YYYY-MM-DD HH:mm" + ) === "Invalid date" + ? "Never" + : moment( + api_keys.last_used_at + ).format("YYYY-MM-DD HH:mm")} +
+
+
+ + {api_keys.total_uses} + + +
+ { + handleDeleteKey(keys); + }} + > + + + + +
+
+
+ ) + )} +
+ )} +
+ )} +
+ +
+
+ + + +
+
+ +
+
+
+
+ )} + + ); +} diff --git a/src/frontend/src/pages/CommunityPage/index.tsx b/src/frontend/src/pages/CommunityPage/index.tsx index c96bbdaf4..7f808f035 100644 --- a/src/frontend/src/pages/CommunityPage/index.tsx +++ b/src/frontend/src/pages/CommunityPage/index.tsx @@ -7,6 +7,7 @@ import { useNavigate } from "react-router-dom"; import { CardComponent } from "../../components/cardComponent"; import IconComponent from "../../components/genericIconComponent"; import Header from "../../components/headerComponent"; +import { SkeletonCardComponent } from "../../components/skeletonCardComponent"; import { getExamples } from "../../controllers/API"; import { FlowType } from "../../types/flow"; export default function CommunityPage(): JSX.Element { @@ -74,7 +75,14 @@ export default function CommunityPage(): JSX.Element { new and powerful features.
- {!loadingExamples && + {loadingExamples ? ( + <> + + + + + + ) : ( examples.map((flow, idx) => ( } /> - ))} + )) + )}
diff --git a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx index 0a147373a..f9db1f233 100644 --- a/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/PageComponent/index.tsx @@ -64,8 +64,13 @@ export default function Page({ setTabsState, tabId, } = useContext(TabsContext); - const { types, reactFlowInstance, setReactFlowInstance, templates } = - useContext(typesContext); + const { + types, + reactFlowInstance, + setReactFlowInstance, + templates, + setFilterEdge, + } = useContext(typesContext); const reactFlowWrapper = useRef(null); const { takeSnapshot } = useContext(undoRedoContext); @@ -155,6 +160,27 @@ export default function Page({ setExtraNavigation({ title: "Components" }); }, [setExtraComponent, setExtraNavigation]); + const [seconds, setSeconds] = useState(0); + + useEffect(() => { + const interval = setInterval(() => { + setSeconds((prevSeconds) => { + let updatedSeconds = prevSeconds + 1; + + if (updatedSeconds % 30 === 0) { + saveFlow(flow, true); + updatedSeconds = 0; + } + + return updatedSeconds; + }); + }, 1000); + + return () => { + clearInterval(interval); + }; + }, []); + const onEdgesChangeMod = useCallback( (change: EdgeChange[]) => { onEdgesChange(change); @@ -298,7 +324,14 @@ export default function Page({ setNodes((nds) => nds.concat(newNode)); } else if (event.dataTransfer.types.some((types) => types === "Files")) { takeSnapshot(); - uploadFlow(false, event.dataTransfer.files.item(0)!); + if (event.dataTransfer.files.item(0)!.type === "application/json") { + uploadFlow(false, event.dataTransfer.files.item(0)!); + } else { + setErrorData({ + title: "Invalid file type", + list: ["Please upload a JSON file"], + }); + } } }, // Specify dependencies for useCallback @@ -375,6 +408,10 @@ export default function Page({ [] ); + const onPaneClick = useCallback((flow) => { + setFilterEdge([]); + }, 
[]); + return (
{!view && } @@ -421,6 +458,8 @@ export default function Page({ zoomOnScroll={!view} zoomOnPinch={!view} panOnDrag={!view} + proOptions={{ hideAttribution: true }} + onPaneClick={onPaneClick} > {!view && ( diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index f8ac36230..d831716dc 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -1,3 +1,4 @@ +import { cloneDeep } from "lodash"; import { useContext, useEffect, useState } from "react"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import IconComponent from "../../../../components/genericIconComponent"; @@ -18,7 +19,8 @@ import { classNames } from "../../../../utils/utils"; import DisclosureComponent from "../DisclosureComponent"; export default function ExtraSidebar(): JSX.Element { - const { data, templates } = useContext(typesContext); + const { data, templates, getFilterEdge, setFilterEdge } = + useContext(typesContext); const { flows, tabId, uploadFlow, tabsState, saveFlow, isBuilt } = useContext(TabsContext); const { setSuccessData, setErrorData } = useContext(alertContext); @@ -42,6 +44,10 @@ export default function ExtraSidebar(): JSX.Element { // Handle showing components after use search input function handleSearchInput(e: string) { + if (e === "") { + setFilterData(data); + return; + } setFilterData((_) => { let ret = {}; Object.keys(data).forEach((d: keyof APIObjectType, i) => { @@ -69,6 +75,57 @@ export default function ExtraSidebar(): JSX.Element { setErrorData({ title: " Components with errors: ", list: errors }); }, []); + function handleBlur() { + setFilterData(data); + setFilterEdge([]); + setSearch(""); + } + + useEffect(() => { + if (getFilterEdge.length === 0 && search === "") { + setFilterData(data); + setFilterEdge([]); + setSearch(""); 
+ } + }, [getFilterEdge]); + + useEffect(() => { + if (getFilterEdge?.length > 0) { + setFilterData((_) => { + let dataClone = cloneDeep(data); + let ret = {}; + Object.keys(dataClone).forEach((d: keyof APIObjectType, i) => { + ret[d] = {}; + if (getFilterEdge.some((x) => x.family === d)) { + ret[d] = dataClone[d]; + + const filtered = getFilterEdge + .filter((x) => x.family === d) + .pop() + .type.split(","); + + for (let i = 0; i < filtered.length; i++) { + filtered[i] = filtered[i].trimStart(); + } + + if (filtered.some((x) => x !== "")) { + let keys = Object.keys(dataClone[d]).filter((nd) => + filtered.includes(nd) + ); + Object.keys(dataClone[d]).forEach((element) => { + if (!keys.includes(element)) { + delete ret[d][element]; + } + }); + } + } + }); + setSearch("search"); + return ret; + }); + } + }, [getFilterEdge]); + return (
@@ -99,16 +156,20 @@ export default function ExtraSidebar(): JSX.Element {
{flow && flow.data && ( - -
- -
+ + )}
@@ -121,7 +182,6 @@ export default function ExtraSidebar(): JSX.Element { } onClick={(event) => { saveFlow(flow!); - setSuccessData({ title: "Changes saved successfully" }); }} >
handleBlur()} type="text" name="search" id="search" diff --git a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx index c628e4003..54651ff4d 100644 --- a/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/nodeToolbarComponent/index.tsx @@ -1,16 +1,25 @@ import { useContext, useState } from "react"; -import { useReactFlow } from "reactflow"; +import { useReactFlow, useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import IconComponent from "../../../../components/genericIconComponent"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, +} from "../../../../components/ui/select-custom"; import { TabsContext } from "../../../../contexts/tabsContext"; import EditNodeModal from "../../../../modals/EditNodeModal"; import { nodeToolbarPropsType } from "../../../../types/components"; -import { classNames } from "../../../../utils/utils"; +import { classNames, getRandomKeyByssmm } from "../../../../utils/utils"; export default function NodeToolbarComponent({ data, setData, deleteNode, + setShowNode, + numberOfHandles, + showNode, }: nodeToolbarPropsType): JSX.Element { const [nodeLength, setNodeLength] = useState( Object.keys(data.node!.template).filter( @@ -27,9 +36,37 @@ export default function NodeToolbarComponent({ data.node.template[templateField].type === "int") ).length ); + const updateNodeInternals = useUpdateNodeInternals(); + function canMinimize() { + let countHandles: number = 0; + numberOfHandles.forEach((bool) => { + if (bool) countHandles += 1; + }); + if (countHandles > 1) return false; + return true; + } + const isMinimal = canMinimize(); const { paste } = useContext(TabsContext); const reactFlowInstance = useReactFlow(); + const [showModalAdvanced, setShowModalAdvanced] = useState(false); + const 
[selectedValue, setSelectedValue] = useState(""); + + const handleSelectChange = (event) => { + setSelectedValue(event); + if (event.includes("advanced")) { + return setShowModalAdvanced(true); + } + setShowModalAdvanced(false); + if (event.includes("show")) { + setShowNode((prev) => !prev); + updateNodeInternals(data.id); + } + if (event.includes("disabled")) { + return; + } + }; + return ( <>
@@ -97,6 +134,96 @@ export default function NodeToolbarComponent({ + {isMinimal ? ( + + ) : ( + +
+ +
+
+ )} + + {showModalAdvanced && ( + { + setShowModalAdvanced(modal); + }} + > + <> + + )} + + {/*
- + */}
diff --git a/src/frontend/src/pages/MainPage/index.tsx b/src/frontend/src/pages/MainPage/index.tsx index 67412401a..5921b3cc8 100644 --- a/src/frontend/src/pages/MainPage/index.tsx +++ b/src/frontend/src/pages/MainPage/index.tsx @@ -1,14 +1,35 @@ -import { useContext, useEffect } from "react"; +import { useContext, useEffect, useState } from "react"; import { Link, useNavigate } from "react-router-dom"; +import DropdownButton from "../../components/DropdownButtonComponent"; import { CardComponent } from "../../components/cardComponent"; import IconComponent from "../../components/genericIconComponent"; import Header from "../../components/headerComponent"; +import { SkeletonCardComponent } from "../../components/skeletonCardComponent"; import { Button } from "../../components/ui/button"; import { USER_PROJECTS_HEADER } from "../../constants/constants"; +import { alertContext } from "../../contexts/alertContext"; import { TabsContext } from "../../contexts/tabsContext"; export default function HomePage(): JSX.Element { - const { flows, setTabId, downloadFlows, uploadFlows, addFlow, removeFlow } = - useContext(TabsContext); + const { + flows, + setTabId, + downloadFlows, + uploadFlows, + addFlow, + removeFlow, + uploadFlow, + isLoading, + } = useContext(TabsContext); + const { setErrorData } = useContext(alertContext); + const dropdownOptions = [ + { + name: "Import from JSON", + onBtnClick: () => + uploadFlow(true).then((id) => { + navigate("/flow/" + id); + }), + }, + ]; // Set a null id useEffect(() => { @@ -16,6 +37,41 @@ export default function HomePage(): JSX.Element { }, []); const navigate = useNavigate(); + const [isDragging, setIsDragging] = useState(false); + + const dragOver = (e) => { + e.preventDefault(); + if (e.dataTransfer.types.some((types) => types === "Files")) { + setIsDragging(true); + } + }; + + const dragEnter = (e) => { + if (e.dataTransfer.types.some((types) => types === "Files")) { + setIsDragging(true); + } + e.preventDefault(); + }; + + 
const dragLeave = () => { + setIsDragging(false); + }; + + const fileDrop = (e) => { + e.preventDefault(); + setIsDragging(false); + if (e.dataTransfer.types.some((types) => types === "Files")) { + if (e.dataTransfer.files.item(0).type === "application/json") { + uploadFlow(true, e.dataTransfer.files.item(0)!); + } else { + setErrorData({ + title: "Invalid file type", + list: ["Please upload a JSON file"], + }); + } + } + }; + // Personal flows display return ( <> @@ -45,48 +101,78 @@ export default function HomePage(): JSX.Element { Upload Collection - + options={dropdownOptions} + />
Manage your personal projects. Download or upload your collection. -
- {flows.map((flow, idx) => ( - - - - } - onDelete={() => { - removeFlow(flow.id); - }} - /> - ))} +
+ {isDragging ? ( + <> + + Drop your flow here + + ) : ( +
+ {isLoading && flows.length == 0 ? ( + <> + + + + + + ) : ( + flows.map((flow, idx) => ( + + + + } + onDelete={() => { + removeFlow(flow.id); + }} + /> + )) + )} +
+ )}
diff --git a/src/frontend/src/pages/ProfileSettingsPage/index.tsx b/src/frontend/src/pages/ProfileSettingsPage/index.tsx new file mode 100644 index 000000000..443de4a4b --- /dev/null +++ b/src/frontend/src/pages/ProfileSettingsPage/index.tsx @@ -0,0 +1,173 @@ +import * as Form from "@radix-ui/react-form"; +import { cloneDeep } from "lodash"; +import { useContext, useEffect, useState } from "react"; +import IconComponent from "../../components/genericIconComponent"; +import GradientChooserComponent from "../../components/gradientChooserComponent"; +import Header from "../../components/headerComponent"; +import InputComponent from "../../components/inputComponent"; +import { Button } from "../../components/ui/button"; +import { CONTROL_PATCH_USER_STATE } from "../../constants/constants"; +import { alertContext } from "../../contexts/alertContext"; +import { AuthContext } from "../../contexts/authContext"; +import { TabsContext } from "../../contexts/tabsContext"; +import { resetPassword, updateUser } from "../../controllers/API"; +import { + inputHandlerEventType, + patchUserInputStateType, +} from "../../types/components"; +import { gradients } from "../../utils/styleUtils"; +export default function ProfileSettingsPage(): JSX.Element { + const { setTabId } = useContext(TabsContext); + + const [inputState, setInputState] = useState( + CONTROL_PATCH_USER_STATE + ); + + // set null id + useEffect(() => { + setTabId(""); + }, []); + const { setErrorData, setSuccessData } = useContext(alertContext); + const { userData, setUserData } = useContext(AuthContext); + const { password, cnfPassword, gradient } = inputState; + + async function handlePatchUser() { + if (password !== cnfPassword) { + setErrorData({ + title: "Error changing password", + list: ["Passwords do not match"], + }); + return; + } + try { + if (password !== "") await resetPassword(userData!.id, { password }); + if (gradient !== "") + await updateUser(userData!.id, { profile_image: gradient }); + if 
(gradient !== "") { + let newUserData = cloneDeep(userData); + newUserData!.profile_image = gradient; + + setUserData(newUserData); + } + handleInput({ target: { name: "password", value: "" } }); + handleInput({ target: { name: "cnfPassword", value: "" } }); + setSuccessData({ title: "Changes saved successfully!" }); + } catch (error) { + setErrorData({ + title: "Error saving changes", + list: [(error as any).response.data.detail], + }); + } + } + + function handleInput({ + target: { name, value }, + }: inputHandlerEventType): void { + setInputState((prev) => ({ ...prev, [name]: value })); + } + + return ( + <> +
+ +
+
+ + + Profile Settings + +
+ + Change your profile settings like your password and your profile + picture. + + { + handlePatchUser(); + const data = Object.fromEntries(new FormData(event.currentTarget)); + event.preventDefault(); + }} + className="flex h-full flex-col px-6 pb-16" + > +
+
+
+ + + Password{" "} + + { + handleInput({ target: { name: "password", value } }); + }} + value={password} + isForm + password={true} + placeholder="Password" + className="w-full" + /> + + Please enter your password + + +
+
+ + + Confirm Password{" "} + + + { + handleInput({ target: { name: "cnfPassword", value } }); + }} + value={cnfPassword} + isForm + password={true} + placeholder="Confirm Password" + className="w-full" + /> + + + Please confirm your password + + +
+
+ + + Profile Gradient{" "} + + +
+ { + handleInput({ target: { name: "gradient", value } }); + }} + /> +
+
+
+ +
+
+ + + +
+
+
+
+ + ); +} diff --git a/src/frontend/src/pages/ViewPage/index.tsx b/src/frontend/src/pages/ViewPage/index.tsx index 3d23b3fe6..4ce5baed4 100644 --- a/src/frontend/src/pages/ViewPage/index.tsx +++ b/src/frontend/src/pages/ViewPage/index.tsx @@ -10,7 +10,7 @@ export default function ViewPage() { // Set flow tab id useEffect(() => { - setTabId(id); + setTabId(id!); }, [id]); // Initialize state variable for the version @@ -26,7 +26,7 @@ export default function ViewPage() { {flows.length > 0 && tabId !== "" && flows.findIndex((flow) => flow.id === tabId) !== -1 && ( - flow.id === tabId)} /> + flow.id === tabId)!} /> )}
); diff --git a/src/frontend/src/pages/loginPage/index.tsx b/src/frontend/src/pages/loginPage/index.tsx index 8cba8bc60..ff9d53e82 100644 --- a/src/frontend/src/pages/loginPage/index.tsx +++ b/src/frontend/src/pages/loginPage/index.tsx @@ -1,10 +1,14 @@ import * as Form from "@radix-ui/react-form"; -import { useState } from "react"; -import { Link } from "react-router-dom"; +import { useContext, useState } from "react"; +import { Link, useNavigate } from "react-router-dom"; import InputComponent from "../../components/inputComponent"; import { Button } from "../../components/ui/button"; import { Input } from "../../components/ui/input"; import { CONTROL_LOGIN_STATE } from "../../constants/constants"; +import { alertContext } from "../../contexts/alertContext"; +import { AuthContext } from "../../contexts/authContext"; +import { getLoggedUser, onLogin } from "../../controllers/API"; +import { LoginType } from "../../types/api"; import { inputHandlerEventType, loginInputStateType, @@ -15,12 +19,50 @@ export default function LoginPage(): JSX.Element { useState(CONTROL_LOGIN_STATE); const { password, username } = inputState; + const { login, getAuthentication, setUserData, setIsAdmin } = + useContext(AuthContext); + const navigate = useNavigate(); + const { setErrorData } = useContext(alertContext); function handleInput({ target: { name, value }, }: inputHandlerEventType): void { setInputState((prev) => ({ ...prev, [name]: value })); } + + function signIn() { + const user: LoginType = { + username: username.trim(), + password: password.trim(), + }; + onLogin(user) + .then((user) => { + login(user.access_token, user.refresh_token); + getUser(); + navigate("/"); + }) + .catch((error) => { + setErrorData({ + title: "Error signing in", + list: [error["response"]["data"]["detail"]], + }); + }); + } + + function getUser() { + if (getAuthentication()) { + setTimeout(() => { + getLoggedUser() + .then((user) => { + const isSuperUser = user!.is_superuser; + 
setIsAdmin(isSuperUser); + setUserData(user); + }) + .catch((error) => {}); + }, 500); + } + } + return ( { @@ -28,7 +70,7 @@ export default function LoginPage(): JSX.Element { event.preventDefault(); return; } - + signIn(); const data = Object.fromEntries(new FormData(event.currentTarget)); event.preventDefault(); }} @@ -48,6 +90,7 @@ export default function LoginPage(): JSX.Element { { handleInput({ target: { name: "username", value } }); }} @@ -88,12 +131,14 @@ export default function LoginPage(): JSX.Element {
- +
- - diff --git a/src/frontend/src/pages/signUpPage/index.tsx b/src/frontend/src/pages/signUpPage/index.tsx index c81253f4d..d153dc522 100644 --- a/src/frontend/src/pages/signUpPage/index.tsx +++ b/src/frontend/src/pages/signUpPage/index.tsx @@ -1,11 +1,17 @@ import * as Form from "@radix-ui/react-form"; -import { FormEvent, useState } from "react"; -import { Link } from "react-router-dom"; +import { FormEvent, useContext, useEffect, useState } from "react"; +import { Link, useNavigate } from "react-router-dom"; import InputComponent from "../../components/inputComponent"; import { Button } from "../../components/ui/button"; import { Input } from "../../components/ui/input"; -import { CONTROL_INPUT_STATE } from "../../constants/constants"; import { + CONTROL_INPUT_STATE, + SIGN_UP_SUCCESS, +} from "../../constants/constants"; +import { alertContext } from "../../contexts/alertContext"; +import { addUser } from "../../controllers/API"; +import { + UserInputType, inputHandlerEventType, signUpInputStateType, } from "../../types/components"; @@ -14,13 +20,52 @@ export default function SignUp(): JSX.Element { const [inputState, setInputState] = useState(CONTROL_INPUT_STATE); + const [isDisabled, setDisableBtn] = useState(true); + const { password, cnfPassword, username } = inputState; + const { setErrorData, setSuccessData } = useContext(alertContext); + const navigate = useNavigate(); function handleInput({ target: { name, value }, }: inputHandlerEventType): void { setInputState((prev) => ({ ...prev, [name]: value })); } + + useEffect(() => { + if (password !== cnfPassword) return setDisableBtn(true); + if (password === "" || cnfPassword === "") return setDisableBtn(true); + if (username === "") return setDisableBtn(true); + setDisableBtn(false); + }, [password, cnfPassword, username, handleInput]); + + function handleSignup(): void { + const { username, password } = inputState; + const newUser: UserInputType = { + username: username.trim(), + password: password.trim(), 
+ }; + addUser(newUser) + .then((user) => { + setSuccessData({ + title: SIGN_UP_SUCCESS, + }); + navigate("/login"); + }) + .catch((error) => { + const { + response: { + data: { detail }, + }, + } = error; + setErrorData({ + title: "Error signing up", + list: [detail], + }); + return; + }); + } + return ( ) => { @@ -48,6 +93,7 @@ export default function SignUp(): JSX.Element { { handleInput({ target: { name: "username", value } }); }} @@ -120,7 +166,16 @@ export default function SignUp(): JSX.Element {
- +
diff --git a/src/frontend/src/routes.tsx b/src/frontend/src/routes.tsx index 0d023e360..e9e6f1858 100644 --- a/src/frontend/src/routes.tsx +++ b/src/frontend/src/routes.tsx @@ -1,32 +1,125 @@ import { Route, Routes } from "react-router-dom"; +import { ProtectedAdminRoute } from "./components/authAdminGuard"; +import { ProtectedRoute } from "./components/authGuard"; +import { ProtectedLoginRoute } from "./components/authLoginGuard"; +import { CatchAllRoute } from "./components/catchAllRoutes"; import AdminPage from "./pages/AdminPage"; import LoginAdminPage from "./pages/AdminPage/LoginPage"; +import ApiKeysPage from "./pages/ApiKeysPage"; import CommunityPage from "./pages/CommunityPage"; import FlowPage from "./pages/FlowPage"; import HomePage from "./pages/MainPage"; +import ProfileSettingsPage from "./pages/ProfileSettingsPage"; import ViewPage from "./pages/ViewPage"; import DeleteAccountPage from "./pages/deleteAccountPage"; import LoginPage from "./pages/loginPage"; +import SignUp from "./pages/signUpPage"; const Router = () => { return ( - } /> - } /> + + + + } + /> + + + + } + /> - } /> - } /> + + + + } + /> + + + + } + /> - } /> + + + + } + /> - } /> - {/* } /> */} - } /> + + + + } + /> + + + + } + /> + + + + } + /> - } /> + + + + } + /> - }> + + + + } + /> + + + + } + > + + + + } + > ); diff --git a/src/frontend/src/style/applies.css b/src/frontend/src/style/applies.css index c10a85f02..203c7da69 100644 --- a/src/frontend/src/style/applies.css +++ b/src/frontend/src/style/applies.css @@ -126,6 +126,18 @@ @apply form-input block w-full truncate rounded-md border-border bg-background px-3 text-left shadow-sm placeholder:text-muted-foreground focus:border-ring focus:placeholder-transparent focus:ring-ring disabled:cursor-not-allowed disabled:opacity-50 sm:text-sm; } + .skeleton-card { + @apply bg-background h-48 p-4 border rounded-lg flex flex-col gap-6; + } + + .skeleton-card-wrapper { + @apply flex items-center space-x-4; + } + + .skeleton-card-text { + 
@apply flex flex-col gap-3; + } + /* The same as primary-input but no-truncate */ .textarea-primary { @apply form-input block w-full rounded-md border-border bg-background px-3 text-left shadow-sm placeholder:text-muted-foreground focus:border-ring focus:placeholder-transparent focus:ring-ring disabled:cursor-not-allowed disabled:opacity-50 sm:text-sm; @@ -200,6 +212,10 @@ @apply flex-max-width h-full flex-col overflow-auto bg-muted px-16; } + .admin-page-panel { + @apply flex-max-width h-full flex-col overflow-auto bg-muted px-16; + } + .main-page-nav-arrangement { @apply flex-max-width justify-between px-6 py-12 pb-2; } @@ -216,6 +232,10 @@ @apply flex w-[60%] px-6 pb-14 text-muted-foreground; } + .admin-page-description-text { + @apply flex w-[80%] px-6 pb-8 text-muted-foreground; + } + .main-page-flows-display { @apply grid w-full gap-4 p-4 md:grid-cols-2 lg:grid-cols-4; } @@ -244,10 +264,10 @@ @apply grid w-full gap-4 p-4 md:grid-cols-2 lg:grid-cols-4; } .generic-node-div { - @apply relative flex w-96 flex-col justify-center rounded-lg bg-background; + @apply relative flex flex-col justify-center bg-background; } .generic-node-div-title { - @apply flex w-full items-center justify-between gap-8 rounded-t-lg border-b bg-muted p-4; + @apply flex w-full items-center gap-8 border-b bg-muted p-4; } .generic-node-title-arrangement { @apply flex-max-width items-center truncate; @@ -291,7 +311,7 @@ @apply hover:text-accent-foreground hover:transition-all; } .generic-node-desc { - @apply h-full w-full py-5 text-foreground; + @apply h-full w-full text-foreground; } .generic-node-desc-text { @apply w-full px-5 pb-3 text-sm text-muted-foreground; @@ -488,10 +508,10 @@ @apply flex-max-width h-12 items-center justify-between border-border bg-muted; } .header-start-display { - @apply flex w-96 items-center justify-start gap-2; + @apply flex w-[30%] items-center justify-start gap-2; } .header-end-division { - @apply flex w-96 justify-end px-2; + @apply flex w-[30%] justify-end 
px-2; } .header-end-display { @apply ml-auto mr-2 flex items-center gap-5; @@ -1037,4 +1057,8 @@ .label-invalid{ @apply text-destructive } + + .input-invalid{ + @apply border-destructive focus:ring-destructive focus:border-destructive + } } diff --git a/src/frontend/src/style/classes.css b/src/frontend/src/style/classes.css index b6662e7bf..1e274135d 100644 --- a/src/frontend/src/style/classes.css +++ b/src/frontend/src/style/classes.css @@ -29,10 +29,42 @@ pre { animation: slideUp 300ms ease-out; } - .gradient-end { animation: gradient-motion-end 3s infinite forwards; } .gradient-start { animation: gradient-motion-start 4s infinite forwards; } + +input:-webkit-autofill, +input:-webkit-autofill:hover, +input:-webkit-autofill:focus, +textarea:-webkit-autofill, +textarea:-webkit-autofill:hover, +textarea:-webkit-autofill:focus, +select:-webkit-autofill, +select:-webkit-autofill:hover, +select:-webkit-autofill:focus { + -webkit-text-fill-color: black; + -webkit-box-shadow: 0 0 0px 1000px #fff6d0 inset; + box-shadow: 0 0 0px 1000px #fff6d0 inset; + color: black; +} +.ace_scrollbar::-webkit-scrollbar { + height: 8px; + width: 8px; +} + +.ace_scrollbar::-webkit-scrollbar-track { + background-color: #f1f1f1; +} + +.ace_scrollbar::-webkit-scrollbar-thumb { + background-color: #ccc; + border-radius: 999px; +} + +.ace_scrollbar::-webkit-scrollbar-thumb:hover { + background-color: #bbb; + border-radius: 999px; +} diff --git a/src/frontend/src/types/api/index.ts b/src/frontend/src/types/api/index.ts index 5a5701b2d..a062274ef 100644 --- a/src/frontend/src/types/api/index.ts +++ b/src/frontend/src/types/api/index.ts @@ -62,3 +62,41 @@ export type UploadFileTypeAPI = { file_path: string; flowId: string; }; + +export type LoginType = { + grant_type?: string; + username: string; + password: string; + scrope?: string; + client_id?: string; + client_secret?: string; +}; + +export type LoginAuthType = { + access_token: string; + refresh_token: string; + token_type?: string; +}; + 
+export type changeUser = { + username?: string; + is_active?: boolean; + is_superuser?: boolean; + password?: string; + profile_image?: string; +}; + +export type resetPasswordType = { + password?: string; + profile_image?: string; +}; + +export type Users = { + id: string; + username: string; + is_active: boolean; + is_superuser: boolean; + profile_image: string; + create_at: Date; + updated_at: Date; +}; diff --git a/src/frontend/src/types/components/index.ts b/src/frontend/src/types/components/index.ts index d4adccb37..a3801d10e 100644 --- a/src/frontend/src/types/components/index.ts +++ b/src/frontend/src/types/components/index.ts @@ -46,6 +46,7 @@ export type ParameterComponentType = { dataContext?: typesContextType; optionalHandle?: Array | null; info?: string; + showNode?: boolean; }; export type InputListComponentType = { value: string[]; @@ -54,6 +55,21 @@ export type InputListComponentType = { editNode?: boolean; }; +export type KeyPairListComponentType = { + value: any; + onChange: (value: Object[]) => void; + disabled: boolean; + editNode?: boolean; + duplicateKey?: boolean; +}; + +export type DictComponentType = { + value: any; + onChange: (value) => void; + disabled: boolean; + editNode?: boolean; +}; + export type TextAreaComponentType = { field_name?: string; nodeClass?: APIClassType; @@ -64,6 +80,16 @@ export type TextAreaComponentType = { editNode?: boolean; }; +export type PromptAreaComponentType = { + field_name?: string; + nodeClass?: APIClassType; + setNodeClass?: (value: APIClassType) => void; + disabled: boolean; + onChange: (value: string[] | string) => void; + value: string; + editNode?: boolean; +}; + export type CodeAreaComponentType = { disabled: boolean; onChange: (value: string[] | string) => void; @@ -173,6 +199,7 @@ export type IconComponentProps = { name: string; className?: string; iconColor?: string; + onClick?: () => void; }; export type InputProps = { @@ -218,7 +245,7 @@ export type signUpInputStateType = { export type 
inputHandlerEventType = { target: { - value: string; + value: string | boolean; name: string; }; }; @@ -233,6 +260,7 @@ export type PaginatorComponentType = { export type ConfirmationModalType = { title: string; titleHeader: string; + asChild?: boolean; modalContent: string; modalContentTitle: string; cancelText: string; @@ -253,6 +281,7 @@ export type UserManagementType = { icon: string; data?: any; index?: number; + asChild?: boolean; onConfirm: (index, data) => void; }; @@ -261,6 +290,35 @@ export type loginInputStateType = { password: string; }; +export type patchUserInputStateType = { + password: string; + cnfPassword: string; + gradient: string; +}; + +export type UserInputType = { + username: string; + password: string; + is_active?: boolean; + is_superuser?: boolean; + id?: string; + create_at?: string; + updated_at?: string; +}; + +export type ApiKeyType = { + title: string; + cancelText: string; + confirmationText: string; + children: ReactElement; + icon: string; + data?: any; + onCloseModal: () => void; +}; + +export type ApiKeyInputType = { + apikeyname: string; +}; export type groupedObjType = { family: string; type: string; @@ -366,6 +424,9 @@ export type nodeToolbarPropsType = { data: NodeDataType; deleteNode: (idx: string) => void; setData: (newState: NodeDataType) => void; + setShowNode: (boolean: any) => void; + numberOfHandles: boolean[] | []; + showNode: boolean; }; export type parsedDataType = { @@ -488,7 +549,7 @@ export type codeTabsPropsType = { ) => string; buildTweakObject?: ( tw: string, - changes: string | string[] | boolean | number, + changes: string | string[] | boolean | number | Object[] | Object, template: TemplateVariableType ) => string | void; }; @@ -508,3 +569,22 @@ export type validationStatusType = { progress: number; valid: boolean; }; + +export type ApiKey = { + id: string; + api_key: string; + name: string; + created_at: string; + last_used_at: string; + total_uses: number; +}; +export type fetchErrorComponentType = { + 
message: string; + description: string; +}; + +export type dropdownButtonPropsType = { + firstButtonName: string; + onFirstBtnClick: () => void; + options: Array<{ name: string; onBtnClick: () => void }>; +}; diff --git a/src/frontend/src/types/contexts/auth.ts b/src/frontend/src/types/contexts/auth.ts index af037ecae..4946efa44 100644 --- a/src/frontend/src/types/contexts/auth.ts +++ b/src/frontend/src/types/contexts/auth.ts @@ -1,16 +1,17 @@ +import { Users } from "../api"; + export type AuthContextType = { + isAdmin: boolean; + setIsAdmin: (isAdmin: boolean) => void; isAuthenticated: boolean; accessToken: string | null; + refreshToken: string | null; login: (accessToken: string, refreshToken: string) => void; logout: () => void; - refreshAccessToken: (refreshToken: string) => Promise; - userData: userData | null; - setUserData: (userData: userData | null) => void; -}; - -export type userData = { - id: string; - name: string; - email: string; - role: string; + userData: Users | null; + setUserData: (userData: Users | null) => void; + getAuthentication: () => boolean; + authenticationErrorCount: number; + autoLogin: boolean; + setAutoLogin: (autoLogin: boolean) => void; }; diff --git a/src/frontend/src/types/tabs/index.ts b/src/frontend/src/types/tabs/index.ts index a87cdb35e..a06b514a4 100644 --- a/src/frontend/src/types/tabs/index.ts +++ b/src/frontend/src/types/tabs/index.ts @@ -2,9 +2,10 @@ import { tweakType } from "../components"; import { FlowType } from "../flow"; export type TabsContextType = { - saveFlow: (flow: FlowType) => Promise; + saveFlow: (flow: FlowType, silent?: boolean) => Promise; save: () => void; tabId: string; + isLoading: boolean; setTabId: (index: string) => void; flows: Array; removeFlow: (id: string) => void; @@ -23,7 +24,7 @@ export type TabsContextType = { uploadFlows: () => void; isBuilt: boolean; setIsBuilt: (state: boolean) => void; - uploadFlow: (newFlow?: boolean, file?: File) => void; + uploadFlow: (newFlow?: boolean, file?: 
File) => Promise; hardReset: () => void; getNodeId: (nodeType: string) => string; tabsState: TabsState; diff --git a/src/frontend/src/types/typesContext/index.ts b/src/frontend/src/types/typesContext/index.ts index 9e57822b9..ff877b9d4 100644 --- a/src/frontend/src/types/typesContext/index.ts +++ b/src/frontend/src/types/typesContext/index.ts @@ -16,6 +16,10 @@ export type typesContextType = { setTemplates: (newState: {}) => void; data: APIDataType; setData: (newState: {}) => void; + fetchError: boolean; + setFetchError: (newState: boolean) => void; + setFilterEdge: (newState) => void; + getFilterEdge: any[]; }; export type alertContextType = { @@ -44,6 +48,10 @@ export type alertContextType = { export type darkContextType = { dark: {}; setDark: (newState: {}) => void; + stars: number; + setStars: (stars: number) => void; + gradientIndex: number; + setGradientIndex: (index: number) => void; }; export type locationContextType = { diff --git a/src/frontend/src/types/utils/reactflowUtils.ts b/src/frontend/src/types/utils/reactflowUtils.ts index ecbbda4e4..9944fdba4 100644 --- a/src/frontend/src/types/utils/reactflowUtils.ts +++ b/src/frontend/src/types/utils/reactflowUtils.ts @@ -1,4 +1,4 @@ -import { Edge } from "reactflow"; +import { Edge, Node } from "reactflow"; import { NodeType } from "../flow"; export type cleanEdgesType = { @@ -8,3 +8,8 @@ export type cleanEdgesType = { }; updateEdge: (edge: Edge[]) => void; }; + +export type unselectAllNodesType = { + updateNodes: (nodes: Node[]) => void; + data: Node[] | null; +}; diff --git a/src/frontend/src/utils/reactflowUtils.ts b/src/frontend/src/utils/reactflowUtils.ts index 13c3973f1..5cb549471 100644 --- a/src/frontend/src/utils/reactflowUtils.ts +++ b/src/frontend/src/utils/reactflowUtils.ts @@ -2,13 +2,17 @@ import _ from "lodash"; import { Connection, Edge, + Node, ReactFlowInstance, ReactFlowJsonObject, } from "reactflow"; import { specialCharsRegex } from "../constants/constants"; import { APITemplateType } 
from "../types/api"; import { FlowType, NodeType } from "../types/flow"; -import { cleanEdgesType } from "../types/utils/reactflowUtils"; +import { + cleanEdgesType, + unselectAllNodesType, +} from "../types/utils/reactflowUtils"; import { toNormalCase } from "./utils"; export function cleanEdges({ @@ -55,6 +59,14 @@ export function cleanEdges({ updateEdge(newEdges); } +export function unselectAllNodes({ updateNodes, data }: unselectAllNodesType) { + let newNodes = _.cloneDeep(data); + newNodes!.forEach((node: Node) => { + node.selected = false; + }); + updateNodes(newNodes!); +} + export function isValidConnection( { source, target, sourceHandle, targetHandle }: Connection, reactFlowInstance: ReactFlowInstance @@ -177,10 +189,7 @@ export function buildTweaks(flow: FlowType) { }, {}); } -export function validateNode( - node: NodeType, - reactFlowInstance: ReactFlowInstance -): Array { +export function validateNode(node: NodeType, edges: Edge[]): Array { if (!node.data?.node?.template || !Object.keys(node.data.node.template)) { return [ "We've noticed a potential issue with a node in the flow. Please review it and, if necessary, submit a bug report with your exported flow file. Thank you for your help!", @@ -192,41 +201,56 @@ export function validateNode( node: { template }, } = node.data; - return Object.keys(template).reduce( - (errors: Array, t) => - errors.concat( - template[t].required && - template[t].show && - (template[t].value === undefined || - template[t].value === null || - template[t].value === "") && - !reactFlowInstance - .getEdges() - .some( - (edge) => - edge.targetHandle?.split("|")[1] === t && - edge.targetHandle.split("|")[2] === node.id - ) - ? 
[ - `${type} is missing ${ - template.display_name || toNormalCase(template[t].name) - }.`, - ] - : [] - ), - [] as string[] - ); + return Object.keys(template).reduce((errors: Array, t) => { + if ( + template[t].required && + template[t].show && + (template[t].value === undefined || + template[t].value === null || + template[t].value === "") && + !edges.some( + (edge) => + edge.targetHandle?.split("|")[1] === t && + edge.targetHandle.split("|")[2] === node.id + ) + ) { + errors.push( + `${type} is missing ${ + template.display_name || toNormalCase(template[t].name) + }.` + ); + } else if ( + template[t].type === "dict" && + template[t].required && + template[t].show && + (template[t].value !== undefined || + template[t].value !== null || + template[t].value !== "") + ) { + if (hasDuplicateKeys(template[t].value)) + errors.push( + `${type} (${ + template.display_name || template[t].name + }) contains duplicate keys with the same values.` + ); + if (hasEmptyKey(template[t].value)) + errors.push( + `${type} (${ + template.display_name || template[t].name + }) field must not be empty.` + ); + } + return errors; + }, [] as string[]); } -export function validateNodes(reactFlowInstance: ReactFlowInstance) { - if (reactFlowInstance.getNodes().length === 0) { +export function validateNodes(nodes: Node[], edges: Edge[]) { + if (nodes.length === 0) { return [ "No nodes found in the flow. 
Please add at least one node to the flow.", ]; } - return reactFlowInstance - .getNodes() - .flatMap((n: NodeType) => validateNode(n, reactFlowInstance)); + return nodes.flatMap((n: NodeType) => validateNode(n, edges)); } export function addVersionToDuplicates(flow: FlowType, flows: FlowType[]) { @@ -243,11 +267,12 @@ export function addVersionToDuplicates(flow: FlowType, flows: FlowType[]) { } export function handleKeyDown( - e: React.KeyboardEvent, + e: + | React.KeyboardEvent + | React.KeyboardEvent, inputValue: string | string[] | null, block: string ) { - console.log(e, inputValue, block); //condition to fix bug control+backspace on Windows/Linux if ( (typeof inputValue === "string" && @@ -278,3 +303,77 @@ export function getConnectedNodes( const targetId = edge.target; return nodes.filter((node) => node.id === targetId || node.id === sourceId); } + +export function convertObjToArray(singleObject: object | string) { + if (typeof singleObject === "string") { + singleObject = JSON.parse(singleObject); + } + if (Array.isArray(singleObject)) return singleObject; + + let arrConverted: any[] = []; + if (typeof singleObject === "object") { + for (const key in singleObject) { + if (Object.prototype.hasOwnProperty.call(singleObject, key)) { + const newObj = {}; + newObj[key] = singleObject[key]; + arrConverted.push(newObj); + } + } + } + return arrConverted; +} + +export function convertArrayToObj(arrayOfObjects) { + if (!Array.isArray(arrayOfObjects)) return arrayOfObjects; + + let objConverted = {}; + for (const obj of arrayOfObjects) { + for (const key in obj) { + if (obj.hasOwnProperty(key)) { + objConverted[key] = obj[key]; + } + } + } + return objConverted; +} + +export function hasDuplicateKeys(array) { + const keys = {}; + for (const obj of array) { + for (const key in obj) { + if (keys[key]) { + return true; + } + keys[key] = true; + } + } + return false; +} + +export function hasEmptyKey(objArray) { + for (const obj of objArray) { + for (const key in obj) { + 
if (obj.hasOwnProperty(key) && key === "") { + return true; // Found an empty key + } + } + } + return false; // No empty keys found +} + +export function convertValuesToNumbers(arr) { + return arr.map((obj) => { + const newObj = {}; + for (const key in obj) { + if (obj.hasOwnProperty(key)) { + let value = obj[key]; + if (/\s/g.test(value)) { + value = value.trim(); + } + newObj[key] = + value === "" || isNaN(value) ? value.toString() : Number(value); + } + } + return newObj; + }); +} diff --git a/src/frontend/src/utils/styleUtils.ts b/src/frontend/src/utils/styleUtils.ts index ffeb4c650..14bafb78b 100644 --- a/src/frontend/src/utils/styleUtils.ts +++ b/src/frontend/src/utils/styleUtils.ts @@ -1,10 +1,13 @@ import { + ArrowUpToLine, Bell, + BookMarked, Check, CheckCircle2, ChevronDown, ChevronLeft, ChevronRight, + ChevronUp, ChevronsLeft, ChevronsRight, ChevronsUpDown, @@ -19,6 +22,8 @@ import { Edit, Eraser, ExternalLink, + Eye, + EyeOff, File, FileDown, FileSearch, @@ -33,16 +38,20 @@ import { HelpCircle, Home, Info, + Key, Laptop2, Layers, Lightbulb, Link, Lock, LucideSend, + Maximize2, Menu, MessageCircle, MessageSquare, MessagesSquare, + Minimize2, + Minus, MoonIcon, MoreHorizontal, Paperclip, @@ -54,12 +63,17 @@ import { Scissors, Search, Settings2, + Shield, Sparkles, + Square, SunIcon, TerminalSquare, Trash2, Undo, + Unplug, Upload, + User, + UserCog2, UserMinus2, UserPlus2, Users2, @@ -80,7 +94,7 @@ import { EvernoteIcon } from "../icons/Evernote"; import { FBIcon } from "../icons/FacebookMessenger"; import { GitBookIcon } from "../icons/GitBook"; import { GoogleIcon } from "../icons/Google"; -import GradientSparkles from "../icons/GradientSparkles"; +import { GradientSparkles } from "../icons/GradientSparkles"; import { HuggingFaceIcon } from "../icons/HuggingFace"; import { IFixIcon } from "../icons/IFixIt"; import { MetaIcon } from "../icons/Meta"; @@ -180,6 +194,7 @@ export const nodeNames: { [char: string]: string } = { }; export const 
nodeIconsLucide: iconsType = { + ArrowUpToLine: ArrowUpToLine, Chroma: ChromaIcon, AirbyteJSONLoader: AirbyteIcon, Anthropic: AnthropicIcon, @@ -214,6 +229,7 @@ export const nodeIconsLucide: iconsType = { ChatVertexAI: VertexAIIcon, VertexAIEmbeddings: VertexAIIcon, agents: Rocket, + User, WikipediaAPIWrapper: SvgWikipedia, chains: Link, memories: Cpu, @@ -254,6 +270,7 @@ export const nodeIconsLucide: iconsType = { Bell, ChevronLeft, ChevronDown, + Shield, Plus, Redo, Settings2, @@ -290,4 +307,15 @@ export const nodeIconsLucide: iconsType = { ChevronsLeft, FaGithub, FaApple, + EyeOff, + Eye, + UserCog2, + Key, + Unplug, + BookMarked, + ChevronUp, + Minus, + Square, + Minimize2, + Maximize2, }; diff --git a/src/frontend/src/utils/utils.ts b/src/frontend/src/utils/utils.ts index 4d9c73f3d..f06a8f63c 100644 --- a/src/frontend/src/utils/utils.ts +++ b/src/frontend/src/utils/utils.ts @@ -299,6 +299,7 @@ export function getChatInputField(flow: FlowType, tabsState?: TabsState) { */ export function getPythonApiCode( flow: FlowType, + isAuth: boolean, tweak?: any[], tabsState?: TabsState ): string { @@ -325,7 +326,9 @@ TWEAKS = ${ : JSON.stringify(tweaks, null, 2) } -def run_flow(inputs: dict, flow_id: str, tweaks: Optional[dict] = None) -> dict: +def run_flow(inputs: dict, flow_id: str, tweaks: Optional[dict] = None${ + !isAuth ? `, api_key: Optional[str] = None` : "" + }) -> dict: """ Run a flow with a given message and optional tweaks. 
@@ -337,16 +340,20 @@ def run_flow(inputs: dict, flow_id: str, tweaks: Optional[dict] = None) -> dict: api_url = f"{BASE_API_URL}/{flow_id}" payload = {"inputs": inputs} - + headers = None if tweaks: payload["tweaks"] = tweaks - - response = requests.post(api_url, json=payload) + if api_key: + headers = {"x-api-key": api_key} + response = requests.post(api_url, json=payload, headers=headers) return response.json() # Setup any tweaks you want to apply to the flow inputs = ${inputs} -print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS))`; +${!isAuth ? `api_key = ""` : ""} +print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS${ + !isAuth ? `, api_key=api_key` : "" + }))`; } /** @@ -356,6 +363,7 @@ print(run_flow(inputs, flow_id=FLOW_ID, tweaks=TWEAKS))`; */ export function getCurlCode( flow: FlowType, + isAuth: boolean, tweak?: any[], tabsState?: TabsState ): string { @@ -367,7 +375,9 @@ export function getCurlCode( ${window.location.protocol}//${ window.location.host }/api/v1/process/${flowId} \\ - -H 'Content-Type: application/json' \\ + -H 'Content-Type: application/json'\\${ + !isAuth ? `\n -H 'x-api-key: '\\` : "" + } -d '{"inputs": ${inputs}, "tweaks": ${ tweak && tweak.length > 0 ? buildTweakObject(tweak) @@ -405,7 +415,11 @@ flow(inputs)`; * @param {string} flow - The current flow. * @returns {string} - The widget code */ -export function getWidgetCode(flow: FlowType, tabsState?: TabsState): string { +export function getWidgetCode( + flow: FlowType, + isAuth: boolean, + tabsState?: TabsState +): string { const flowId = flow.id; const flowName = flow.name; const inputs = buildInputs(tabsState!, flow.id); @@ -425,7 +439,13 @@ chat_input_field: Input key that you want the chat to send the user message with chat_input_field="${chat_input_field}" ` : "" - }host_url="http://localhost:7860" + }host_url="http://localhost:7860"${ + !isAuth + ? 
` + api_key="..."` + : "" + } + >`; } diff --git a/src/frontend/start-nginx.sh b/src/frontend/start-nginx.sh new file mode 100644 index 000000000..dd20f5305 --- /dev/null +++ b/src/frontend/start-nginx.sh @@ -0,0 +1,8 @@ +#!/bin/sh + +# Replace the placeholder with the actual value +sed -i "s|__BACKEND_URL__|$BACKEND_URL|g" /etc/nginx/conf.d/default.conf + + +# Start nginx +exec nginx -g 'daemon off;' diff --git a/src/frontend/tailwind.config.js b/src/frontend/tailwind.config.js index 52330ae92..5130f3fcf 100644 --- a/src/frontend/tailwind.config.js +++ b/src/frontend/tailwind.config.js @@ -201,6 +201,12 @@ module.exports = { ".dark .theme-attribution .react-flow__attribution a": { color: "black", }, + ".text-align-last-left": { + "text-align-last": "left", + }, + ".text-align-last-right": { + "text-align-last": "right", + }, }); }), require("@tailwindcss/typography"), diff --git a/src/frontend/tests/auto_login.spec.ts b/src/frontend/tests/auto_login.spec.ts new file mode 100644 index 000000000..ba14263bc --- /dev/null +++ b/src/frontend/tests/auto_login.spec.ts @@ -0,0 +1,32 @@ +import { expect, test } from "@playwright/test"; + +test.describe("Auto_login tests", () => { + test("auto_login sign in", async ({ page }) => { + await page.routeFromHAR("harFiles/langflow.har", { + url: "**/api/v1/**", + update: false, + }); + await page.goto("http:localhost:3000/"); + await page.getByRole("button", { name: "Community Examples" }).click(); + await page.waitForSelector(".community-pages-flows-panel"); + expect( + await page + .locator(".community-pages-flows-panel") + .evaluate((el) => el.children) + ).toBeTruthy(); + }); + test("auto_login block_admin", async ({ page }) => { + await page.routeFromHAR("harFiles/langflow.har", { + url: "**/api/v1/**", + update: false, + }); + await page.goto("http:localhost:3000/"); + await page.getByRole("button", { name: "Community Examples" }).click(); + await page.goto("http:localhost:3000/login"); + await page.getByRole("button", { 
name: "Community Examples" }).click(); + await page.goto("http:localhost:3000/admin"); + await page.getByRole("button", { name: "Community Examples" }).click(); + await page.goto("http:localhost:3000/admin/login"); + await page.getByRole("button", { name: "Community Examples" }).click(); + }); +}); diff --git a/src/frontend/tests/login.spec.ts b/src/frontend/tests/login.spec.ts new file mode 100644 index 000000000..e3980912a --- /dev/null +++ b/src/frontend/tests/login.spec.ts @@ -0,0 +1,128 @@ +import { expect, test } from "@playwright/test"; + +test.describe("Login Tests", () => { + test("Login_Success", async ({ page }) => { + await page.route("**/api/v1/login", async (route) => { + const json = { + access_token: + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhMWNlM2FkOS1iZTE2LTRiNjgtOGRhYi1hYjA4YTVjMmZjZTkiLCJleHAiOjE2OTUyNTIwNTh9.MBYFwMhTcZnsW_L7p4qavUhSDylCllJQWUCJdU1wX8o", + refresh_token: + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJhMWNlM2FkOS1iZTE2LTRiNjgtOGRhYi1hYjA4YTVjMmZjZTkiLCJ0eXBlIjoicmYiLCJleHAiOjE2OTUyNTI2NTh9.a4wL9-XK_zyTyrXduBFgCsODFXrqiByVr5HOeiCbiQA", + token_type: "bearer", + }; + await route.fulfill({ json }); + }); + + await page.goto("http://localhost:3000/"); + await page.waitForURL("http://localhost:3000/login"); + await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); + await page.getByPlaceholder("Username").click(); + await page.getByPlaceholder("Username").fill("test"); + await page.getByPlaceholder("Password").click(); + await page.getByPlaceholder("Password").fill("test"); + await page.getByRole("button", { name: "Sign in" }).click(); + await page.getByRole("button", { name: "Community Examples" }).click(); + await page.waitForSelector(".community-pages-flows-panel"); + expect( + await page + .locator(".community-pages-flows-panel") + .evaluate((el) => el.children) + ).toBeTruthy(); + }); + + test("Login Error", async ({ page }) => { + await page.route("**/api/v1/login", async (route) => { + const json = 
{ detail: "Incorrect username or password" }; + await route.fulfill({ json, status: 401 }); + }); + + await page.goto("http://localhost:3000/"); + await page.waitForURL("http://localhost:3000/login"); + await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); + await page.getByPlaceholder("Username").click(); + await page.getByPlaceholder("Username").fill("test"); + await page.getByPlaceholder("Password").click(); + await page.getByPlaceholder("Password").fill("test5"); + await page.getByRole("button", { name: "Sign in" }).click(); + await page.getByRole("heading", { name: "Error signing in" }).click(); + }); + + test("Login create account wrong form", async ({ page }) => { + const fullfillForm = async (username, password, confirmPassword) => { + await page.getByPlaceholder("Username").click(); + await page.getByPlaceholder("Username").fill(username); + await page.getByPlaceholder("Password", { exact: true }).click(); + await page.getByPlaceholder("Password", { exact: true }).fill(password); + await page.getByPlaceholder("Confirm your password").click(); + await page + .getByPlaceholder("Confirm your password") + .fill(confirmPassword); + }; + + await page.goto("http://localhost:3000/"); + await page.waitForURL("http://localhost:3000/login"); + await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); + await page + .getByRole("button", { name: "Don't have an account? 
Sign Up" }) + .click(); + await page.getByText("Sign up to Langflow").click(); + await page.goto("http://localhost:3000/signup"); + await page.getByText("Sign up to Langflow").click(); + + await fullfillForm("name", "vazz", "vazz5"); + expect( + await page.getByRole("button", { name: "Sign up" }).isDisabled() + ).toBeTruthy(); + await fullfillForm("", "vazz", "vazz"); + expect( + await page.getByRole("button", { name: "Sign up" }).isDisabled() + ).toBeTruthy(); + await fullfillForm("name", "", ""); + expect( + await page.getByRole("button", { name: "Sign up" }).isDisabled() + ).toBeTruthy(); + await fullfillForm("", "", ""); + expect( + await page.getByRole("button", { name: "Sign up" }).isDisabled() + ).toBeTruthy(); + }); + test("Login create account success", async ({ page }) => { + await page.route("**/api/v1/users/", async (route) => { + const json = { + id: "e9ac1bdc-429b-475d-ac03-d26f9a2a3210", + username: "teste", + profile_image: null, + is_active: false, + is_superuser: false, + create_at: "2023-09-21T01:45:51.873303", + updated_at: "2023-09-21T01:45:51.873305", + last_login_at: null, + }; + await route.fulfill({ json, status: 201 }); + }); + const submitForm = async (username, password, confirmPassword) => { + await page.getByPlaceholder("Username").click(); + await page.getByPlaceholder("Username").fill(username); + await page.getByPlaceholder("Password", { exact: true }).click(); + await page.getByPlaceholder("Password", { exact: true }).fill(password); + await page.getByPlaceholder("Confirm your password").click(); + await page + .getByPlaceholder("Confirm your password") + .fill(confirmPassword); + }; + + await page.goto("http://localhost:3000/"); + await page.waitForURL("http://localhost:3000/login"); + await page.waitForURL("http://localhost:3000/login", { timeout: 100 }); + await page + .getByRole("button", { name: "Don't have an account? 
Sign Up" }) + .click(); + await page.getByText("Sign up to Langflow").click(); + await page.goto("http://localhost:3000/signup"); + await page.getByText("Sign up to Langflow").click(); + await submitForm("teste", "pass", "pass"); + await page.getByRole("button", { name: "Sign up" }).click(); + await page.waitForURL("http://localhost:3000/login", { timeout: 1000 }); + await page.getByText("Account created! Await admin activation.").click(); + }); +}); diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/conftest.py b/tests/conftest.py index 182e59e89..468b6cb11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,10 +1,16 @@ from contextlib import contextmanager import json +from contextlib import suppress from pathlib import Path from typing import AsyncGenerator, TYPE_CHECKING -from langflow.api.v1.flows import get_session from langflow.graph.graph.base import Graph +from langflow.services.auth.utils import get_password_hash +from langflow.services.database.models.flow.flow import Flow, FlowCreate +from langflow.services.database.models.user.user import User, UserCreate +import orjson +from langflow.services.database.utils import session_getter +from langflow.services.getters import get_db_service import pytest from fastapi.testclient import TestClient from httpx import AsyncClient @@ -12,8 +18,11 @@ from sqlmodel import SQLModel, Session, create_engine from sqlmodel.pool import StaticPool from typer.testing import CliRunner +# we need to import tmpdir +import tempfile + if TYPE_CHECKING: - from langflow.services.database.manager import DatabaseManager + from langflow.services.database.manager import DatabaseService def pytest_configure(): @@ -26,7 +35,12 @@ def pytest_configure(): pytest.OPENAPI_EXAMPLE_PATH = ( Path(__file__).parent.absolute() / "data" / "Openapi.json" ) - + pytest.BASIC_CHAT_WITH_PROMPT_AND_HISTORY = ( + Path(__file__).parent.absolute() / "data" / 
"BasicChatwithPromptandHistory.json" + ) + pytest.VECTOR_STORE_PATH = ( + Path(__file__).parent.absolute() / "data" / "Vector_store.json" + ) pytest.CODE_WITH_SYNTAX_ERROR = """ def get_text(): retun "Hello World" @@ -42,15 +56,62 @@ async def async_client() -> AsyncGenerator: yield client -# Create client fixture for FastAPI -@pytest.fixture(scope="module") -def client(): +@pytest.fixture(name="session") +def session_fixture(): + engine = create_engine( + "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool + ) + SQLModel.metadata.create_all(engine) + with Session(engine) as session: + yield session + + +class Config: + broker_url = "redis://localhost:6379/0" + result_backend = "redis://localhost:6379/0" + + +@pytest.fixture(name="distributed_env") +def setup_env(monkeypatch): + monkeypatch.setenv("LANGFLOW_CACHE_TYPE", "redis") + monkeypatch.setenv("LANGFLOW_REDIS_HOST", "result_backend") + monkeypatch.setenv("LANGFLOW_REDIS_PORT", "6379") + monkeypatch.setenv("LANGFLOW_REDIS_DB", "0") + monkeypatch.setenv("LANGFLOW_REDIS_EXPIRE", "3600") + monkeypatch.setenv("LANGFLOW_REDIS_PASSWORD", "") + monkeypatch.setenv("FLOWER_UNAUTHENTICATED_API", "True") + monkeypatch.setenv("BROKER_URL", "redis://result_backend:6379/0") + monkeypatch.setenv("RESULT_BACKEND", "redis://result_backend:6379/0") + monkeypatch.setenv("C_FORCE_ROOT", "true") + + +@pytest.fixture(name="distributed_client") +def distributed_client_fixture(session: Session, monkeypatch, distributed_env): + # Here we load the .env from ../deploy/.env + from langflow.core import celery_app + + db_dir = tempfile.mkdtemp() + db_path = Path(db_dir) / "test.db" + monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") + # monkeypatch langflow.services.task.manager.USE_CELERY to True + # monkeypatch.setattr(manager, "USE_CELERY", True) + monkeypatch.setattr( + celery_app, "celery_app", celery_app.make_celery("langflow", Config) + ) 
+ + # def get_session_override(): + # return session + from langflow.main import create_app app = create_app() + # app.dependency_overrides[get_session] = get_session_override with TestClient(app) as client: yield client + app.dependency_overrides.clear() + monkeypatch.undo() def get_graph(_type="basic"): @@ -98,55 +159,53 @@ def json_flow(): return f.read() -@pytest.fixture(name="session") -def session_fixture(): - engine = create_engine( - "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool - ) - SQLModel.metadata.create_all(engine) - with Session(engine) as session: - yield session +@pytest.fixture +def json_flow_with_prompt_and_history(): + with open(pytest.BASIC_CHAT_WITH_PROMPT_AND_HISTORY, "r") as f: + return f.read() +<<<<<<< HEAD @pytest.fixture(name="client", scope="function", autouse=True) def client_fixture(session: Session): def get_session_override(): return session +======= +@pytest.fixture +def json_vector_store(): + with open(pytest.VECTOR_STORE_PATH, "r") as f: + return f.read() + + +@pytest.fixture(name="client", autouse=True) +def client_fixture(session: Session, monkeypatch): + # Set the database url to a test database + db_dir = tempfile.mkdtemp() + db_path = Path(db_dir) / "test.db" + monkeypatch.setenv("LANGFLOW_DATABASE_URL", f"sqlite:///{db_path}") + monkeypatch.setenv("LANGFLOW_AUTO_LOGIN", "false") +>>>>>>> origin/dev from langflow.main import create_app app = create_app() - app.dependency_overrides[get_session] = get_session_override + # app.dependency_overrides[get_session] = get_session_override with TestClient(app) as client: yield client - app.dependency_overrides.clear() - - -# @contextmanager -# def session_getter(): -# try: -# session = Session(engine) -# yield session -# except Exception as e: -# print("Session rollback because of exception:", e) -# session.rollback() -# raise -# finally: -# session.close() + # app.dependency_overrides.clear() + monkeypatch.undo() + # clear the temp db + with 
suppress(FileNotFoundError): + db_path.unlink() # create a fixture for session_getter above @pytest.fixture(name="session_getter") def session_getter_fixture(client): - engine = create_engine( - "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool - ) - SQLModel.metadata.create_all(engine) - @contextmanager - def blank_session_getter(db_manager: "DatabaseManager"): - with Session(db_manager.engine) as session: + def blank_session_getter(db_service: "DatabaseService"): + with Session(db_service.engine) as session: yield session yield blank_session_getter @@ -155,3 +214,90 @@ def session_getter_fixture(client): @pytest.fixture def runner(): return CliRunner() + + +@pytest.fixture +def test_user(client): + user_data = UserCreate( + username="testuser", + password="testpassword", + ) + response = client.post("/api/v1/users", json=user_data.dict()) + assert response.status_code == 201 + return response.json() + + +@pytest.fixture(scope="function") +def active_user(client): + db_manager = get_db_service() + with session_getter(db_manager) as session: + user = User( + username="activeuser", + password=get_password_hash("testpassword"), + is_active=True, + is_superuser=False, + ) + # check if user exists + if ( + active_user := session.query(User) + .filter(User.username == user.username) + .first() + ): + return active_user + session.add(user) + session.commit() + session.refresh(user) + return user + + +@pytest.fixture +def logged_in_headers(client, active_user): + login_data = {"username": active_user.username, "password": "testpassword"} + response = client.post("/api/v1/login", data=login_data) + assert response.status_code == 200 + tokens = response.json() + a_token = tokens["access_token"] + return {"Authorization": f"Bearer {a_token}"} + + +@pytest.fixture +def flow(client, json_flow: str, active_user): + from langflow.services.database.models.flow.flow import FlowCreate + + loaded_json = json.loads(json_flow) + flow_data = FlowCreate( + 
name="test_flow", data=loaded_json.get("data"), user_id=active_user.id + ) + flow = Flow(**flow_data.dict()) + with session_getter(get_db_service()) as session: + session.add(flow) + session.commit() + session.refresh(flow) + + return flow + + +@pytest.fixture +def added_flow(client, json_flow_with_prompt_and_history, logged_in_headers): + flow = orjson.loads(json_flow_with_prompt_and_history) + data = flow["data"] + flow = FlowCreate(name="Basic Chat", description="description", data=data) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) + assert response.status_code == 201 + assert response.json()["name"] == flow.name + assert response.json()["data"] == flow.data + return response.json() + + +@pytest.fixture +def added_vector_store(client, json_vector_store, logged_in_headers): + vector_store = orjson.loads(json_vector_store) + data = vector_store["data"] + vector_store = FlowCreate(name="Vector Store", description="description", data=data) + response = client.post( + "api/v1/flows/", json=vector_store.dict(), headers=logged_in_headers + ) + assert response.status_code == 201 + assert response.json()["name"] == vector_store.name + assert response.json()["data"] == vector_store.data + return response.json() diff --git a/tests/data/BasicChatwithPromptandHistory.json b/tests/data/BasicChatwithPromptandHistory.json new file mode 100644 index 000000000..658ac0479 --- /dev/null +++ b/tests/data/BasicChatwithPromptandHistory.json @@ -0,0 +1 @@ +{"description":"A simple chat with a custom prompt template and conversational memory buffer","name":"Basic Chat with Prompt and History 
(2)","data":{"nodes":[{"width":384,"height":621,"id":"ChatOpenAI-vy7fV","type":"genericNode","position":{"x":170.87326389541306,"y":465.8628482073749},"data":{"type":"ChatOpenAI","node":{"template":{"callbacks":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":"","type":"langchain.callbacks.base.BaseCallbackHandler","list":true},"cache":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"cache","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"client":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"client","advanced":false,"dynamic":false,"info":"","type":"Any","list":false},"max_retries":{"required":false,"placeholder":"","show":false,"multiline":false,"value":6,"password":false,"name":"max_retries","advanced":false,"dynamic":false,"info":"","type":"int","list":false},"max_tokens":{"required":false,"placeholder":"","show":true,"multiline":false,"password":true,"name":"max_tokens","advanced":false,"dynamic":false,"info":"","type":"int","list":false,"value":""},"metadata":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"model_kwargs":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"model_kwargs","advanced":true,"dynamic":false,"info":"","type":"code","list":false},"model_name":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"gpt-3.5-turbo","password":false,"options":["gpt-3.5-turbo-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k-0613","gpt-3.5-turbo-16k","gpt-4-0613","gpt-4-32k-0613","gpt-4","gpt-4-32k"],"name":"model_name","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"n":{"required":false,"placeholder":"","show":false,"multiline":false,"value":1,"password":false,"name":"
n","advanced":false,"dynamic":false,"info":"","type":"int","list":false},"openai_api_base":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"openai_api_base","display_name":"OpenAI API Base","advanced":false,"dynamic":false,"info":"\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n","type":"str","list":false},"openai_api_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":true,"name":"openai_api_key","display_name":"OpenAI API Key","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"openai_organization":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"openai_organization","display_name":"OpenAI Organization","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"openai_proxy":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"openai_proxy","display_name":"OpenAI 
Proxy","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"request_timeout":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"request_timeout","advanced":false,"dynamic":false,"info":"","type":"float","list":false,"value":60},"streaming":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"streaming","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"tags":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tags","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"temperature":{"required":false,"placeholder":"","show":true,"multiline":false,"value":0.7,"password":false,"name":"temperature","advanced":false,"dynamic":false,"info":"","type":"float","list":false},"tiktoken_model_name":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tiktoken_model_name","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"verbose":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"verbose","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"_type":"ChatOpenAI"},"description":"`OpenAI` Chat large language models 
API.","base_classes":["ChatOpenAI","BaseChatModel","BaseLanguageModel","BaseLLM"],"display_name":"ChatOpenAI","documentation":"https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai"},"id":"ChatOpenAI-vy7fV","value":null},"selected":true,"dragging":false,"positionAbsolute":{"x":170.87326389541306,"y":465.8628482073749}},{"width":384,"height":307,"id":"LLMChain-UjBh1","type":"genericNode","position":{"x":1250.1806448178158,"y":588.4657451068704},"data":{"type":"LLMChain","node":{"template":{"callbacks":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":"","type":"langchain.callbacks.base.BaseCallbackHandler","list":true},"llm":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"llm","advanced":false,"dynamic":false,"info":"","type":"BaseLanguageModel","list":false},"memory":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"memory","advanced":false,"dynamic":false,"info":"","type":"BaseMemory","list":false},"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":false,"info":"","type":"BaseLLMOutputParser","list":false},"prompt":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"prompt","advanced":false,"dynamic":false,"info":"","type":"BasePromptTemplate","list":false},"llm_kwargs":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"llm_kwargs","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"metadata":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"output_key":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"text","password":false,"name":
"output_key","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"return_final_only":{"required":false,"placeholder":"","show":false,"multiline":false,"value":true,"password":false,"name":"return_final_only","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"tags":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tags","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"verbose":{"required":false,"placeholder":"","show":false,"multiline":false,"value":false,"password":false,"name":"verbose","advanced":true,"dynamic":false,"info":"","type":"bool","list":false},"_type":"LLMChain"},"description":"Chain to run queries against LLMs.","base_classes":["LLMChain","Chain","function"],"display_name":"LLMChain","documentation":"https://python.langchain.com/docs/modules/chains/foundational/llm_chain"},"id":"LLMChain-UjBh1","value":null},"selected":false,"positionAbsolute":{"x":1250.1806448178158,"y":588.4657451068704},"dragging":false},{"width":384,"height":273,"id":"PromptTemplate-5Q0W8","type":"genericNode","position":{"x":172.18064481781585,"y":67.26574510687044},"data":{"type":"PromptTemplate","node":{"template":{"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":false,"info":"","type":"BaseOutputParser","list":false},"input_variables":{"required":true,"placeholder":"","show":false,"multiline":false,"password":false,"name":"input_variables","advanced":false,"dynamic":false,"info":"","type":"str","list":true,"value":["history","text"]},"partial_variables":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"partial_variables","advanced":false,"dynamic":false,"info":"","type":"code","list":false},"template":{"required":true,"placeholder":"","show":true,"multiline":true,"password":false,"name":"template","advanced":false,"dynamic":false,"info":"","
type":"prompt","list":false,"value":"The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.\n\nCurrent conversation:\n\n{history}\nHuman: {text}\nAI:"},"template_format":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"f-string","password":false,"name":"template_format","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"validate_template":{"required":false,"placeholder":"","show":false,"multiline":false,"value":true,"password":false,"name":"validate_template","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"_type":"PromptTemplate","history":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"history","display_name":"history","advanced":false,"input_types":["Document","BaseOutputParser"],"dynamic":false,"info":"","type":"str","list":false},"text":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"text","display_name":"text","advanced":false,"input_types":["Document","BaseOutputParser"],"dynamic":false,"info":"","type":"str","list":false}},"description":"A prompt template for a language 
model.","base_classes":["StringPromptTemplate","PromptTemplate","BasePromptTemplate"],"name":"","display_name":"PromptTemplate","documentation":"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/","custom_fields":{"template":["history","text"]},"output_types":[],"field_formatters":{"formatters":{"openai_api_key":{}},"base_formatters":{"kwargs":{},"optional":{},"list":{},"dict":{},"union":{},"multiline":{},"show":{},"password":{},"default":{},"headers":{},"dict_code_file":{},"model_fields":{"MODEL_DICT":{"OpenAI":["text-davinci-003","text-davinci-002","text-curie-001","text-babbage-001","text-ada-001"],"ChatOpenAI":["gpt-3.5-turbo-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k-0613","gpt-3.5-turbo-16k","gpt-4-0613","gpt-4-32k-0613","gpt-4","gpt-4-32k"],"Anthropic":["claude-v1","claude-v1-100k","claude-instant-v1","claude-instant-v1-100k","claude-v1.3","claude-v1.3-100k","claude-v1.2","claude-v1.0","claude-instant-v1.1","claude-instant-v1.1-100k","claude-instant-v1.0"],"ChatAnthropic":["claude-v1","claude-v1-100k","claude-instant-v1","claude-instant-v1-100k","claude-v1.3","claude-v1.3-100k","claude-v1.2","claude-v1.0","claude-instant-v1.1","claude-instant-v1.1-100k","claude-instant-v1.0"]}}}},"beta":false,"error":null},"id":"PromptTemplate-5Q0W8","value":null},"selected":false,"dragging":false,"positionAbsolute":{"x":172.18064481781585,"y":67.26574510687044}},{"width":384,"height":561,"id":"ConversationBufferMemory-Lu2Nb","type":"genericNode","position":{"x":802.1806448178158,"y":43.265745106870426},"data":{"type":"ConversationBufferMemory","node":{"template":{"chat_memory":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"chat_memory","advanced":false,"dynamic":false,"info":"","type":"BaseChatMessageHistory","list":false},"ai_prefix":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"AI","password":false,"name":"ai_prefix","advanced":false,"dynamic":false,"info":"","type":"str","list"
:false},"human_prefix":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"Human","password":false,"name":"human_prefix","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"input_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":false,"name":"input_key","advanced":false,"dynamic":false,"info":"The variable to be used as Chat Input when more than one variable is available.","type":"str","list":false},"memory_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"history","password":false,"name":"memory_key","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"output_key":{"required":false,"placeholder":"","show":true,"multiline":false,"value":"","password":false,"name":"output_key","advanced":false,"dynamic":false,"info":"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)","type":"str","list":false},"return_messages":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"return_messages","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"_type":"ConversationBufferMemory"},"description":"Buffer for storing conversation 
memory.","base_classes":["BaseMemory","ConversationBufferMemory","BaseChatMemory"],"display_name":"ConversationBufferMemory","documentation":"https://python.langchain.com/docs/modules/memory/how_to/buffer"},"id":"ConversationBufferMemory-Lu2Nb","value":null},"selected":false,"positionAbsolute":{"x":802.1806448178158,"y":43.265745106870426},"dragging":false}],"edges":[{"source":"ChatOpenAI-vy7fV","sourceHandle":"ChatOpenAI|ChatOpenAI-vy7fV|ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLLM","target":"LLMChain-UjBh1","targetHandle":"BaseLanguageModel|llm|LLMChain-UjBh1","className":"","id":"reactflow__edge-ChatOpenAI-vy7fVChatOpenAI|ChatOpenAI-vy7fV|ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLLM-LLMChain-UjBh1BaseLanguageModel|llm|LLMChain-UjBh1","selected":false,"animated":false,"style":{"stroke":"#555"}},{"source":"PromptTemplate-5Q0W8","sourceHandle":"PromptTemplate|PromptTemplate-5Q0W8|StringPromptTemplate|PromptTemplate|BasePromptTemplate","target":"LLMChain-UjBh1","targetHandle":"BasePromptTemplate|prompt|LLMChain-UjBh1","className":"","id":"reactflow__edge-PromptTemplate-5Q0W8PromptTemplate|PromptTemplate-5Q0W8|StringPromptTemplate|PromptTemplate|BasePromptTemplate-LLMChain-UjBh1BasePromptTemplate|prompt|LLMChain-UjBh1","animated":false,"style":{"stroke":"#555"}},{"source":"ConversationBufferMemory-Lu2Nb","sourceHandle":"ConversationBufferMemory|ConversationBufferMemory-Lu2Nb|BaseMemory|ConversationBufferMemory|BaseChatMemory","target":"LLMChain-UjBh1","targetHandle":"BaseMemory|memory|LLMChain-UjBh1","className":"","id":"reactflow__edge-ConversationBufferMemory-Lu2NbConversationBufferMemory|ConversationBufferMemory-Lu2Nb|BaseMemory|ConversationBufferMemory|BaseChatMemory-LLMChain-UjBh1BaseMemory|memory|LLMChain-UjBh1","animated":false,"style":{"stroke":"#555"}}],"viewport":{"x":-64.70809474436828,"y":44.7801470275611,"zoom":0.6622606580990782}},"id":"0cdfb2f2-19de-4e15-99fa-fd5203b38053"} \ No newline at end of file diff --git 
a/tests/data/Vector_store.json b/tests/data/Vector_store.json new file mode 100644 index 000000000..2a1ddd5f3 --- /dev/null +++ b/tests/data/Vector_store.json @@ -0,0 +1,1283 @@ +{ + "name": "Vector Store", + "description": "An agent that can query a Vector Store.\nTry asking \"How do I upload examples to Langflow?\"\n\n\n\n", + "data": { + "nodes": [ + { + "width": 384, + "height": 267, + "id": "VectorStoreAgent-FOmxY", + "type": "genericNode", + "position": { + "x": 2115.5183674856203, + "y": -1277.6284872455249 + }, + "data": { + "type": "VectorStoreAgent", + "node": { + "template": { + "llm": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "llm", + "display_name": "LLM", + "advanced": false, + "dynamic": false, + "info": "", + "type": "BaseLanguageModel", + "list": false + }, + "vectorstoreinfo": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "vectorstoreinfo", + "display_name": "Vector Store Info", + "advanced": false, + "dynamic": false, + "info": "", + "type": "VectorStoreInfo", + "list": false + }, + "_type": "vectorstore_agent" + }, + "description": "Construct an agent from a Vector Store.", + "base_classes": [ + "AgentExecutor" + ], + "display_name": "VectorStoreAgent", + "documentation": "" + }, + "id": "VectorStoreAgent-FOmxY", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 2115.5183674856203, + "y": -1277.6284872455249 + }, + "dragging": false + }, + { + "width": 384, + "height": 399, + "id": "VectorStoreInfo-z0sH5", + "type": "genericNode", + "position": { + "x": 1553.2875394928135, + "y": -1319.2113273706286 + }, + "data": { + "type": "VectorStoreInfo", + "node": { + "template": { + "vectorstore": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "vectorstore", + "advanced": false, + "dynamic": false, + "info": "", + "type": 
"VectorStore", + "list": false + }, + "description": { + "required": true, + "placeholder": "", + "show": true, + "multiline": true, + "password": false, + "name": "description", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false, + "value": "Instructions to upload examples to Langflow Community Examples" + }, + "name": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false, + "value": "UploadExamples" + }, + "_type": "VectorStoreInfo" + }, + "description": "Information about a VectorStore.", + "base_classes": [ + "VectorStoreInfo" + ], + "display_name": "VectorStoreInfo", + "documentation": "" + }, + "id": "VectorStoreInfo-z0sH5", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 1553.2875394928135, + "y": -1319.2113273706286 + }, + "dragging": false + }, + { + "width": 384, + "height": 359, + "id": "OpenAIEmbeddings-lge2J", + "type": "genericNode", + "position": { + "x": 677.2699276778915, + "y": -734.4639958173494 + }, + "data": { + "type": "OpenAIEmbeddings", + "node": { + "template": { + "allowed_special": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": [], + "password": false, + "name": "allowed_special", + "advanced": true, + "dynamic": false, + "info": "", + "type": "Literal'all'", + "list": true + }, + "disallowed_special": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "all", + "password": false, + "name": "disallowed_special", + "advanced": true, + "dynamic": false, + "info": "", + "type": "Literal'all'", + "list": true + }, + "chunk_size": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": 1000, + "password": false, + "name": "chunk_size", + "advanced": true, + "dynamic": false, + "info": "", + "type": "int", + 
"list": false + }, + "client": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "client", + "advanced": true, + "dynamic": false, + "info": "", + "type": "Any", + "list": false + }, + "deployment": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "password": false, + "name": "deployment", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "embedding_ctx_length": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": 8191, + "password": false, + "name": "embedding_ctx_length", + "advanced": true, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "headers": { + "required": false, + "placeholder": "", + "show": false, + "multiline": true, + "value": "{'Authorization':\n 'Bearer '}", + "password": false, + "name": "headers", + "advanced": true, + "dynamic": false, + "info": "", + "type": "Any", + "list": false + }, + "max_retries": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": 6, + "password": false, + "name": "max_retries", + "advanced": true, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "model": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "text-embedding-ada-002", + "password": false, + "name": "model", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "model_kwargs": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "model_kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "openai_api_base": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "openai_api_base", + "display_name": "OpenAI API 
Base", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false, + "value": "" + }, + "openai_api_key": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_api_type": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "openai_api_type", + "display_name": "OpenAI API Type", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false, + "value": "" + }, + "openai_api_version": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "openai_api_version", + "display_name": "OpenAI API Version", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false, + "value": "" + }, + "openai_organization": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_proxy": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "request_timeout": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "request_timeout", + "advanced": true, + "dynamic": false, + "info": "", + "type": "float", + "list": false + }, + "show_progress_bar": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": false, + "password": false, + "name": "show_progress_bar", + 
"advanced": true, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "tiktoken_model_name": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "tiktoken_model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false, + "value": "" + }, + "_type": "OpenAIEmbeddings" + }, + "description": "OpenAI embedding models.", + "base_classes": [ + "OpenAIEmbeddings", + "Embeddings" + ], + "display_name": "OpenAIEmbeddings", + "documentation": "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai" + }, + "id": "OpenAIEmbeddings-lge2J", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 677.2699276778915, + "y": -734.4639958173494 + }, + "dragging": false + }, + { + "width": 384, + "height": 515, + "id": "Chroma-UK4a8", + "type": "genericNode", + "position": { + "x": 1138.12587416446, + "y": -1289.1517285671812 + }, + "data": { + "type": "Chroma", + "node": { + "template": { + "client": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "client", + "advanced": false, + "dynamic": false, + "info": "", + "type": "chromadb.Client", + "list": false + }, + "client_settings": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "client_settings", + "advanced": false, + "dynamic": false, + "info": "", + "type": "chromadb.config.Setting", + "list": true + }, + "documents": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "documents", + "display_name": "Documents", + "advanced": false, + "dynamic": false, + "info": "", + "type": "Document", + "list": true + }, + "embedding": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "embedding", + "display_name": 
"Embedding", + "advanced": false, + "dynamic": false, + "info": "", + "type": "Embeddings", + "list": false + }, + "chroma_server_cors_allow_origins": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "chroma_server_cors_allow_origins", + "display_name": "Chroma Server CORS Allow Origins", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "chroma_server_grpc_port": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "chroma_server_grpc_port", + "display_name": "Chroma Server GRPC Port", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "chroma_server_host": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "chroma_server_host", + "display_name": "Chroma Server Host", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "chroma_server_http_port": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "chroma_server_http_port", + "display_name": "Chroma Server HTTP Port", + "advanced": true, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "chroma_server_ssl_enabled": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": false, + "password": false, + "name": "chroma_server_ssl_enabled", + "display_name": "Chroma Server SSL Enabled", + "advanced": true, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "collection_metadata": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "collection_metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "collection_name": { + "required": false, + "placeholder": "", 
+ "show": true, + "multiline": false, + "value": "langflow", + "password": false, + "name": "collection_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "ids": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "ids", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "metadatas": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "metadatas", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": true + }, + "persist": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": false, + "password": false, + "name": "persist", + "display_name": "Persist", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "persist_directory": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "persist_directory", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "search_kwargs": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "{}", + "password": false, + "name": "search_kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "_type": "Chroma" + }, + "description": "Create a Chroma vectorstore from a raw documents.", + "base_classes": [ + "VectorStore", + "Chroma", + "BaseRetriever", + "VectorStoreRetriever" + ], + "display_name": "Chroma", + "custom_fields": {}, + "output_types": [], + "documentation": "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma" + }, + "id": "Chroma-UK4a8", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 1138.12587416446, + "y": -1289.1517285671812 + }, + "dragging": false + 
}, + { + "width": 384, + "height": 575, + "id": "RecursiveCharacterTextSplitter-AUWrU", + "type": "genericNode", + "position": { + "x": 607.3861456929772, + "y": -1343.8126308350086 + }, + "data": { + "type": "RecursiveCharacterTextSplitter", + "node": { + "template": { + "documents": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "documents", + "advanced": false, + "dynamic": false, + "info": "", + "type": "Document", + "list": true + }, + "chunk_overlap": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": 200, + "password": false, + "name": "chunk_overlap", + "display_name": "Chunk Overlap", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "chunk_size": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": 1000, + "password": false, + "name": "chunk_size", + "display_name": "Chunk Size", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "separator_type": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": "Text", + "password": false, + "options": [ + "Text", + "cpp", + "go", + "html", + "java", + "js", + "latex", + "markdown", + "php", + "proto", + "python", + "rst", + "ruby", + "rust", + "scala", + "sol", + "swift" + ], + "name": "separator_type", + "display_name": "Separator Type", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "separators": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": ".", + "password": false, + "name": "separators", + "display_name": "Separator", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "_type": "RecursiveCharacterTextSplitter" + }, + "description": "Splitting text by recursively look at characters.", + "base_classes": [ + 
"Document" + ], + "display_name": "RecursiveCharacterTextSplitter", + "custom_fields": {}, + "output_types": [ + "Document" + ], + "documentation": "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter" + }, + "id": "RecursiveCharacterTextSplitter-AUWrU", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 607.3861456929772, + "y": -1343.8126308350086 + } + }, + { + "width": 384, + "height": 379, + "id": "WebBaseLoader-aUAEE", + "type": "genericNode", + "position": { + "x": 60.77712301470575, + "y": -1345.575885746874 + }, + "data": { + "type": "WebBaseLoader", + "node": { + "template": { + "metadata": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": "{}", + "password": false, + "name": "metadata", + "display_name": "Metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "web_path": { + "required": true, + "placeholder": "", + "show": true, + "multiline": false, + "value": "http://docs.langflow.org/examples/how-upload-examples", + "password": false, + "name": "web_path", + "display_name": "Web Page", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "_type": "WebBaseLoader" + }, + "description": "Load HTML pages using `urllib` and parse them with `BeautifulSoup'.", + "base_classes": [ + "Document" + ], + "display_name": "WebBaseLoader", + "custom_fields": {}, + "output_types": [ + "Document" + ], + "documentation": "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base" + }, + "id": "WebBaseLoader-aUAEE", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 60.77712301470575, + "y": -1345.575885746874 + }, + "dragging": false + }, + { + "width": 384, + "height": 621, + "id": "ChatOpenAI-U4mZ2", + "type": "genericNode", + "position": { + "x": 1557.7805431884235, + "y": 
-897.7091381330642 + }, + "data": { + "type": "ChatOpenAI", + "node": { + "template": { + "callbacks": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "callbacks", + "advanced": false, + "dynamic": false, + "info": "", + "type": "langchain.callbacks.base.BaseCallbackHandler", + "list": true + }, + "cache": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "cache", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "client": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "client", + "advanced": false, + "dynamic": false, + "info": "", + "type": "Any", + "list": false + }, + "max_retries": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": 6, + "password": false, + "name": "max_retries", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "max_tokens": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": true, + "name": "max_tokens", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false, + "value": "" + }, + "metadata": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "metadata", + "advanced": false, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "model_kwargs": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "model_kwargs", + "advanced": true, + "dynamic": false, + "info": "", + "type": "code", + "list": false + }, + "model_name": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "gpt-3.5-turbo-0613", + "password": false, + "options": [ + "gpt-3.5-turbo-0613", + 
"gpt-3.5-turbo", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-16k", + "gpt-4-0613", + "gpt-4-32k-0613", + "gpt-4", + "gpt-4-32k" + ], + "name": "model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "n": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": 1, + "password": false, + "name": "n", + "advanced": false, + "dynamic": false, + "info": "", + "type": "int", + "list": false + }, + "openai_api_base": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "password": false, + "name": "openai_api_base", + "display_name": "OpenAI API Base", + "advanced": false, + "dynamic": false, + "info": "\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n", + "type": "str", + "list": false + }, + "openai_api_key": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "", + "password": true, + "name": "openai_api_key", + "display_name": "OpenAI API Key", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_organization": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "openai_organization", + "display_name": "OpenAI Organization", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "openai_proxy": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "openai_proxy", + "display_name": "OpenAI Proxy", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "request_timeout": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "request_timeout", + "advanced": false, + "dynamic": false, + "info": "", + "type": 
"float", + "list": false + }, + "streaming": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "streaming", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "tags": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tags", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": true + }, + "temperature": { + "required": false, + "placeholder": "", + "show": true, + "multiline": false, + "value": "0.2", + "password": false, + "name": "temperature", + "advanced": false, + "dynamic": false, + "info": "", + "type": "float", + "list": false + }, + "tiktoken_model_name": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "password": false, + "name": "tiktoken_model_name", + "advanced": false, + "dynamic": false, + "info": "", + "type": "str", + "list": false + }, + "verbose": { + "required": false, + "placeholder": "", + "show": false, + "multiline": false, + "value": false, + "password": false, + "name": "verbose", + "advanced": false, + "dynamic": false, + "info": "", + "type": "bool", + "list": false + }, + "_type": "ChatOpenAI" + }, + "description": "`OpenAI` Chat large language models API.", + "base_classes": [ + "ChatOpenAI", + "BaseLanguageModel", + "BaseChatModel", + "BaseLLM" + ], + "display_name": "ChatOpenAI", + "custom_fields": {}, + "output_types": [], + "documentation": "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai" + }, + "id": "ChatOpenAI-U4mZ2", + "value": null + }, + "selected": false, + "positionAbsolute": { + "x": 1557.7805431884235, + "y": -897.7091381330642 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "VectorStoreInfo-z0sH5", + "sourceHandle": "VectorStoreInfo|VectorStoreInfo-z0sH5|VectorStoreInfo", + "target": "VectorStoreAgent-FOmxY", + 
"targetHandle": "VectorStoreInfo|vectorstoreinfo|VectorStoreAgent-FOmxY", + "className": "", + "id": "reactflow__edge-VectorStoreInfo-z0sH5VectorStoreInfo|VectorStoreInfo-z0sH5|VectorStoreInfo-VectorStoreAgent-FOmxYVectorStoreInfo|vectorstoreinfo|VectorStoreAgent-FOmxY", + "selected": false, + "style": { + "stroke": "#555" + }, + "animated": false + }, + { + "source": "Chroma-UK4a8", + "sourceHandle": "Chroma|Chroma-UK4a8|VectorStore|Chroma|BaseRetriever|VectorStoreRetriever", + "target": "VectorStoreInfo-z0sH5", + "targetHandle": "VectorStore|vectorstore|VectorStoreInfo-z0sH5", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-Chroma-UK4a8Chroma|Chroma-UK4a8|VectorStore|Chroma|BaseRetriever|VectorStoreRetriever-VectorStoreInfo-z0sH5VectorStore|vectorstore|VectorStoreInfo-z0sH5", + "selected": false + }, + { + "source": "WebBaseLoader-aUAEE", + "sourceHandle": "WebBaseLoader|WebBaseLoader-aUAEE|Document", + "target": "RecursiveCharacterTextSplitter-AUWrU", + "targetHandle": "Document|documents|RecursiveCharacterTextSplitter-AUWrU", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-WebBaseLoader-aUAEEWebBaseLoader|WebBaseLoader-aUAEE|Document-RecursiveCharacterTextSplitter-AUWrUDocument|documents|RecursiveCharacterTextSplitter-AUWrU", + "selected": false + }, + { + "source": "RecursiveCharacterTextSplitter-AUWrU", + "sourceHandle": "RecursiveCharacterTextSplitter|RecursiveCharacterTextSplitter-AUWrU|Document", + "target": "Chroma-UK4a8", + "targetHandle": "Document|documents|Chroma-UK4a8", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-RecursiveCharacterTextSplitter-AUWrURecursiveCharacterTextSplitter|RecursiveCharacterTextSplitter-AUWrU|Document-Chroma-UK4a8Document|documents|Chroma-UK4a8", + "selected": false + }, + { + "source": "ChatOpenAI-U4mZ2", + "sourceHandle": 
"ChatOpenAI|ChatOpenAI-U4mZ2|ChatOpenAI|BaseLanguageModel|BaseChatModel|BaseLLM", + "target": "VectorStoreAgent-FOmxY", + "targetHandle": "BaseLanguageModel|llm|VectorStoreAgent-FOmxY", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-ChatOpenAI-U4mZ2ChatOpenAI|ChatOpenAI-U4mZ2|ChatOpenAI|BaseLanguageModel|BaseChatModel|BaseLLM-VectorStoreAgent-FOmxYBaseLanguageModel|llm|VectorStoreAgent-FOmxY", + "selected": false + }, + { + "source": "OpenAIEmbeddings-lge2J", + "sourceHandle": "OpenAIEmbeddings|OpenAIEmbeddings-lge2J|OpenAIEmbeddings|Embeddings", + "target": "Chroma-UK4a8", + "targetHandle": "Embeddings|embedding|Chroma-UK4a8", + "style": { + "stroke": "#555" + }, + "className": "", + "animated": false, + "id": "reactflow__edge-OpenAIEmbeddings-lge2JOpenAIEmbeddings|OpenAIEmbeddings-lge2J|OpenAIEmbeddings|Embeddings-Chroma-UK4a8Embeddings|embedding|Chroma-UK4a8" + } + ], + "viewport": { + "x": 23.25459650899495, + "y": 727.4174391025257, + "zoom": 0.3802259585247222 + } + }, + "id": "cc9d45a0-a071-4435-9e90-32ccbd1a972b", + "user_id": "c65bfea3-3eea-4e71-8fc4-106238eb0583" +} \ No newline at end of file diff --git a/tests/data/basic_example.json b/tests/data/basic_example.json index 2c08512bb..35f515bc4 100644 --- a/tests/data/basic_example.json +++ b/tests/data/basic_example.json @@ -236,7 +236,7 @@ "placeholder": "", "show": true, "multiline": false, - "value": "abc", + "value": null, "password": true, "name": "openai_api_key", "display_name": "OpenAI API Key", diff --git a/tests/locust/locustfile.py b/tests/locust/locustfile.py new file mode 100644 index 000000000..aca0d1de9 --- /dev/null +++ b/tests/locust/locustfile.py @@ -0,0 +1,132 @@ +from locust import FastHttpUser, task, between +import random +import time +import orjson +from rich import print +import httpx +from pathlib import Path + + +class NameTest(FastHttpUser): + wait_time = between(1, 5) + + with open("names.txt", "r") as file: + names = 
[line.strip() for line in file.readlines()] + + headers = {} + + def poll_task(self, task_id, sleep_time=1): + while True: + with self.rest( + "GET", + f"/task/{task_id}", + name="task_status", + headers=self.headers, + ) as response: + status = response.js.get("status") + print(f"Poll Response: {response.js}") + if status == "SUCCESS": + return response.js.get("result") + elif status in ["FAILURE", "REVOKED"]: + raise ValueError(f"Task failed with status: {status}") + time.sleep(sleep_time) + + def process(self, name, flow_id, payload): + task_id = None + print(f"Processing {payload}") + with self.rest( + "POST", + f"/process/{flow_id}", + json=payload, + name="process", + headers=self.headers, + ) as response: + print(response.js) + if response.status_code != 200: + response.failure("Process call failed") + raise ValueError("Process call failed") + task_id = response.js.get("id") + session_id = response.js.get("session_id") + assert task_id, "Inner Task ID not found" + + assert task_id, "Task ID not found" + result = self.poll_task(task_id) + print(f"Result for {name}: {result}") + + return result, session_id + + @task + def send_name_and_check(self): + name = random.choice(self.names) + + payload1 = { + "inputs": {"text": f"Hello, My name is {name}"}, + "sync": False, + } + result1, session_id = self.process(name, self.flow_id, payload1) + + payload2 = { + "inputs": { + "text": "What is my name? 
Please, answer like this: Your name is " + }, + "session_id": session_id, + "sync": False, + } + result2, session_id = self.process(name, self.flow_id, payload2) + + assert f"Your name is {name}" in str(result2), "Name not found in response" + + def on_start(self): + print("Starting") + login_data = {"username": "superuser", "password": "superuser"} + response = httpx.post(f"{self.host}/login", data=login_data) + print(response.json()) + + tokens = response.json() + print(tokens) + a_token = tokens["access_token"] + logged_in_headers = {"Authorization": f"Bearer {a_token}"} + print("Logged in") + with open( + Path(__file__).parent.parent + / "data" + / "BasicChatwithPromptandHistory.json", + "r", + ) as f: + json_flow = f.read() + flow = orjson.loads(json_flow) + data = flow["data"] + # Create test data + flow = {"name": "Flow 1", "description": "description", "data": data} + print("Creating flow") + # Make request to endpoint + response = httpx.post( + f"{self.host}/flows/", + json=flow, + headers=logged_in_headers, + ) + self.flow_id = response.json()["id"] + print(f"Flow ID: {self.flow_id}") + + # read all users + response = httpx.get( + f"{self.host}/users/", + headers=logged_in_headers, + ) + print(response.json()) + user_id = next( + ( + user["id"] + for user in response.json()["users"] + if user["username"] == "superuser" + ), + None, + ) + # Create api key + response = httpx.post( + f"{self.host}/api_key/", + json={"user_id": user_id}, + headers=logged_in_headers, + ) + print(response.json()) + self.headers["x-api-key"] = response.json()["api_key"] diff --git a/tests/locust/names.txt b/tests/locust/names.txt new file mode 100644 index 000000000..555af7369 --- /dev/null +++ b/tests/locust/names.txt @@ -0,0 +1,5 @@ +Bob +Alice +John +Gabriel +Lily diff --git a/tests/test_agents_template.py b/tests/test_agents_template.py index 0b5fb7c3a..b12ad7dee 100644 --- a/tests/test_agents_template.py +++ b/tests/test_agents_template.py @@ -1,8 +1,8 @@ from 
fastapi.testclient import TestClient -def test_zero_shot_agent(client: TestClient): - response = client.get("api/v1/all") +def test_zero_shot_agent(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] @@ -113,8 +113,8 @@ def test_zero_shot_agent(client: TestClient): } -def test_json_agent(client: TestClient): - response = client.get("api/v1/all") +def test_json_agent(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] @@ -152,8 +152,8 @@ def test_json_agent(client: TestClient): } -def test_csv_agent(client: TestClient): - response = client.get("api/v1/all") +def test_csv_agent(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] @@ -195,8 +195,8 @@ def test_csv_agent(client: TestClient): } -def test_initialize_agent(client: TestClient): - response = client.get("api/v1/all") +def test_initialize_agent(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() agents = json_response["agents"] diff --git a/tests/test_api_key.py b/tests/test_api_key.py new file mode 100644 index 000000000..43b91fa43 --- /dev/null +++ b/tests/test_api_key.py @@ -0,0 +1,50 @@ +import pytest +from langflow.services.database.models.api_key import ApiKeyCreate + + +@pytest.fixture +def api_key(client, logged_in_headers, active_user): + api_key = ApiKeyCreate(name="test-api-key") + + response = client.post( + "api/v1/api_key", data=api_key.json(), headers=logged_in_headers + ) + assert response.status_code == 200, response.text + 
return response.json() + + +def test_get_api_keys(client, logged_in_headers, api_key): + response = client.get("api/v1/api_key", headers=logged_in_headers) + assert response.status_code == 200, response.text + data = response.json() + assert "total_count" in data + assert "user_id" in data + assert "api_keys" in data + assert any("test-api-key" in api_key["name"] for api_key in data["api_keys"]) + # assert all api keys in data["api_keys"] are masked + assert all("**" in api_key["api_key"] for api_key in data["api_keys"]) + # Add more assertions as needed based on the expected data structure and content + + +def test_create_api_key(client, logged_in_headers): + api_key_name = "test-api-key" + response = client.post( + "api/v1/api_key", json={"name": api_key_name}, headers=logged_in_headers + ) + assert response.status_code == 200 + data = response.json() + assert "name" in data and data["name"] == api_key_name + assert "api_key" in data + # When creating the API key is returned which is + # the only time the API key is unmasked + assert "**" not in data["api_key"] + + +def test_delete_api_key(client, logged_in_headers, active_user, api_key): + # Assuming a function to create a test API key, returning the key ID + api_key_id = api_key["id"] + response = client.delete(f"api/v1/api_key/{api_key_id}", headers=logged_in_headers) + assert response.status_code == 200 + data = response.json() + assert data["detail"] == "API Key deleted" + # Optionally, add a follow-up check to ensure that the key is actually removed from the database diff --git a/tests/test_cache.py b/tests/test_cache.py index 50698c304..c2c706ee9 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -2,9 +2,6 @@ import json from langflow.graph import Graph import pytest -from langflow.interface.run import ( - build_langchain_object_with_caching, -) def get_graph(_type="basic"): @@ -40,32 +37,9 @@ def langchain_objects_are_equal(obj1, obj2): return str(obj1) == str(obj2) -# Test 
build_langchain_object_with_caching -def test_build_langchain_object_with_caching(basic_data_graph): - build_langchain_object_with_caching.clear_cache() - graph = build_langchain_object_with_caching(basic_data_graph) - assert graph is not None - - # Test build_graph -def test_build_graph(basic_data_graph): +def test_build_graph(client, basic_data_graph): graph = Graph.from_payload(basic_data_graph) assert graph is not None assert len(graph.nodes) == len(basic_data_graph["nodes"]) assert len(graph.edges) == len(basic_data_graph["edges"]) - - -# Test cache size limit -def test_cache_size_limit(basic_data_graph): - build_langchain_object_with_caching.clear_cache() - for i in range(11): - modified_data_graph = basic_data_graph.copy() - nodes = modified_data_graph["nodes"] - node_id = nodes[0]["id"] - # Now we replace all instances ode node_id with a new id in the json - json_string = json.dumps(modified_data_graph) - modified_json_string = json_string.replace(node_id, f"{node_id}_{i}") - modified_data_graph_new_id = json.loads(modified_json_string) - build_langchain_object_with_caching(modified_data_graph_new_id) - - assert len(build_langchain_object_with_caching.cache) == 10 diff --git a/tests/test_cache_manager.py b/tests/test_cache_manager.py index 660512634..bbc94d9fb 100644 --- a/tests/test_cache_manager.py +++ b/tests/test_cache_manager.py @@ -2,81 +2,81 @@ from io import StringIO import pandas as pd import pytest -from langflow.services.cache.manager import CacheManager +from langflow.services.chat.cache import CacheService from PIL import Image @pytest.fixture -def cache_manager(): - return CacheManager() +def cache_service(): + return CacheService() -def test_cache_manager_attach_detach_notify(cache_manager): +def test_cache_service_attach_detach_notify(cache_service): observer_called = False def observer(): nonlocal observer_called observer_called = True - cache_manager.attach(observer) - cache_manager.notify() + cache_service.attach(observer) + 
cache_service.notify() assert observer_called observer_called = False - cache_manager.detach(observer) - cache_manager.notify() + cache_service.detach(observer) + cache_service.notify() assert not observer_called -def test_cache_manager_client_context(cache_manager): - with cache_manager.set_client_id("client1"): - cache_manager.add("foo", "bar", "string") - assert cache_manager.get("foo") == { +def test_cache_service_client_context(cache_service): + with cache_service.set_client_id("client1"): + cache_service.add("foo", "bar", "string") + assert cache_service.get("foo") == { "obj": "bar", "type": "string", "extension": "str", } - with cache_manager.set_client_id("client2"): - cache_manager.add("baz", "qux", "string") - assert cache_manager.get("baz") == { + with cache_service.set_client_id("client2"): + cache_service.add("baz", "qux", "string") + assert cache_service.get("baz") == { "obj": "qux", "type": "string", "extension": "str", } with pytest.raises(KeyError): - cache_manager.get("foo") + cache_service.get("foo") -def test_cache_manager_add_pandas(cache_manager): +def test_cache_service_add_pandas(cache_service): df = pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}) - with cache_manager.set_client_id("client1"): - cache_manager.add_pandas("test_df", df) - cached_df = cache_manager.get("test_df") + with cache_service.set_client_id("client1"): + cache_service.add_pandas("test_df", df) + cached_df = cache_service.get("test_df") assert cached_df["type"] == "pandas" assert cached_df["extension"] == "csv" read_df = pd.read_csv(StringIO(cached_df["obj"]), index_col=0) pd.testing.assert_frame_equal(df, read_df) -def test_cache_manager_add_image(cache_manager): +def test_cache_service_add_image(cache_service): img = Image.new("RGB", (50, 50), color="red") - with cache_manager.set_client_id("client1"): - cache_manager.add_image("test_image", img) - cached_img = cache_manager.get("test_image") + with cache_service.set_client_id("client1"): + 
cache_service.add_image("test_image", img) + cached_img = cache_service.get("test_image") assert cached_img["type"] == "image" assert cached_img["extension"] == "png" assert isinstance(cached_img["obj"], Image.Image) -def test_cache_manager_get_last(cache_manager): - with cache_manager.set_client_id("client1"): - cache_manager.add("foo", "bar", "string") - cache_manager.add("baz", "qux", "string") - last_item = cache_manager.get_last() +def test_cache_service_get_last(cache_service): + with cache_service.set_client_id("client1"): + cache_service.add("foo", "bar", "string") + cache_service.add("baz", "qux", "string") + last_item = cache_service.get_last() assert last_item == {"obj": "qux", "type": "string", "extension": "str"} diff --git a/tests/test_chains_template.py b/tests/test_chains_template.py index 4339dbe3b..eb20a0571 100644 --- a/tests/test_chains_template.py +++ b/tests/test_chains_template.py @@ -1,8 +1,8 @@ from fastapi.testclient import TestClient -# def test_chains_settings(client: TestClient): -# response = client.get("api/v1/all") +# def test_chains_settings(client: TestClient, logged_in_headers): +# response = client.get("api/v1/all", headers=logged_in_headers) # assert response.status_code == 200 # json_response = response.json() # chains = json_response["chains"] @@ -10,8 +10,8 @@ from fastapi.testclient import TestClient # Test the ConversationChain object -def test_conversation_chain(client: TestClient): - response = client.get("api/v1/all") +def test_conversation_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -102,8 +102,8 @@ def test_conversation_chain(client: TestClient): ) -def test_llm_chain(client: TestClient): - response = client.get("api/v1/all") +def test_llm_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) 
assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -173,8 +173,8 @@ def test_llm_chain(client: TestClient): } -def test_llm_checker_chain(client: TestClient): - response = client.get("api/v1/all") +def test_llm_checker_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -207,8 +207,8 @@ def test_llm_checker_chain(client: TestClient): assert chain["description"] == "" -def test_llm_math_chain(client: TestClient): - response = client.get("api/v1/all") +def test_llm_math_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -299,8 +299,8 @@ def test_llm_math_chain(client: TestClient): ) -def test_series_character_chain(client: TestClient): - response = client.get("api/v1/all") +def test_series_character_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -367,8 +367,8 @@ def test_series_character_chain(client: TestClient): ) -def test_mid_journey_prompt_chain(client: TestClient): - response = client.get("api/v1/all") +def test_mid_journey_prompt_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] @@ -408,8 +408,8 @@ def test_mid_journey_prompt_chain(client: TestClient): ) -def test_time_travel_guide_chain(client: TestClient): - response = client.get("api/v1/all") +def test_time_travel_guide_chain(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", 
headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() chains = json_response["chains"] diff --git a/tests/test_cli.py b/tests/test_cli.py index c990ef9e8..604c313f8 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -3,7 +3,7 @@ from tempfile import tempdir from langflow.__main__ import app import pytest -from langflow.services import utils +from langflow.services import getters @pytest.fixture(scope="module") @@ -23,8 +23,19 @@ def test_components_path(runner, client, default_settings): result = runner.invoke( app, - ["--components-path", str(temp_dir), *default_settings], + ["run", "--components-path", str(temp_dir), *default_settings], ) assert result.exit_code == 0, result.stdout +<<<<<<< HEAD settings_manager = utils.get_settings_manager() assert str(temp_dir) in settings_manager.settings.COMPONENTS_PATH +======= + settings_service = getters.get_settings_service() + assert str(temp_dir) in settings_service.settings.COMPONENTS_PATH + + +def test_superuser(runner, client, session): + result = runner.invoke(app, ["superuser"], input="admin\nadmin\n") + assert result.exit_code == 0, result.stdout + assert "Superuser created successfully." 
in result.stdout +>>>>>>> origin/dev diff --git a/tests/test_creators.py b/tests/test_creators.py index 2098e87cd..177dd4105 100644 --- a/tests/test_creators.py +++ b/tests/test_creators.py @@ -32,6 +32,7 @@ def sample_agent_creator() -> AgentCreator: def test_lang_chain_type_creator_to_dict( + client, sample_lang_chain_type_creator: LangChainTypeCreator, ): type_dict = sample_lang_chain_type_creator.to_dict() diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py index 4a4dcb910..da4f845b1 100644 --- a/tests/test_custom_component.py +++ b/tests/test_custom_component.py @@ -473,15 +473,16 @@ def test_build_config_no_code(): @pytest.fixture -def component(): +def component(client, active_user): return CustomComponent( + user_id=active_user.id, field_config={ "fields": { "llm": {"type": "str"}, "url": {"type": "str"}, "year": {"type": "int"}, } - } + }, ) @@ -517,13 +518,13 @@ def db(app): app.db.drop_all() -def test_list_flows_return_type(component, session_getter): - flows = component.list_flows(get_session=session_getter) +def test_list_flows_return_type(component): + flows = component.list_flows() assert isinstance(flows, list) -def test_list_flows_flow_objects(component, session_getter): - flows = component.list_flows(get_session=session_getter) +def test_list_flows_flow_objects(component): + flows = component.list_flows() assert all(isinstance(flow, Flow) for flow in flows) diff --git a/tests/test_database.py b/tests/test_database.py index 52a5daa4c..21f0cec17 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,8 +1,11 @@ -import json +from langflow.services.database.models.base import orjson_dumps +from langflow.services.database.utils import session_getter +from langflow.services.getters import get_db_service +import orjson import pytest from uuid import UUID, uuid4 -from sqlalchemy.orm import Session +from sqlmodel import Session from fastapi.testclient import TestClient @@ -16,7 +19,7 @@ def json_style(): # color: 
str = Field(index=True) # emoji: str = Field(index=False) # flow_id: UUID = Field(default=None, foreign_key="flow.id") - return json.dumps( + return orjson_dumps( { "color": "red", "emoji": "๐Ÿ‘", @@ -24,63 +27,69 @@ def json_style(): ) -def test_create_flow(client: TestClient, json_flow: str): - flow = json.loads(json_flow) +def test_create_flow( + client: TestClient, json_flow: str, active_user, logged_in_headers +): + flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.dict()) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data # flow is optional so we can create a flow without a flow flow = FlowCreate(name="Test Flow") - response = client.post("api/v1/flows/", json=flow.dict(exclude_unset=True)) + response = client.post( + "api/v1/flows/", json=flow.dict(exclude_unset=True), headers=logged_in_headers + ) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data -def test_read_flows(client: TestClient, json_flow: str): - flow_data = json.loads(json_flow) +def test_read_flows(client: TestClient, json_flow: str, active_user, logged_in_headers): + flow_data = orjson.loads(json_flow) data = flow_data["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.dict()) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.dict()) + response = client.post("api/v1/flows/", 
json=flow.dict(), headers=logged_in_headers) assert response.status_code == 201 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data - response = client.get("api/v1/flows/") + response = client.get("api/v1/flows/", headers=logged_in_headers) assert response.status_code == 200 assert len(response.json()) > 0 -def test_read_flow(client: TestClient, json_flow: str): - flow = json.loads(json_flow) +def test_read_flow(client: TestClient, json_flow: str, active_user, logged_in_headers): + flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.dict()) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) flow_id = response.json()["id"] # flow_id should be a UUID but is a string # turn it into a UUID flow_id = UUID(flow_id) - response = client.get(f"api/v1/flows/{flow_id}") + response = client.get(f"api/v1/flows/{flow_id}", headers=logged_in_headers) assert response.status_code == 200 assert response.json()["name"] == flow.name assert response.json()["data"] == flow.data -def test_update_flow(client: TestClient, json_flow: str): - flow = json.loads(json_flow) +def test_update_flow( + client: TestClient, json_flow: str, active_user, logged_in_headers +): + flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.dict()) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) flow_id = response.json()["id"] updated_flow = FlowUpdate( @@ -88,7 +97,9 @@ def test_update_flow(client: TestClient, json_flow: str): description="updated description", data=data, ) - response = client.patch(f"api/v1/flows/{flow_id}", json=updated_flow.dict()) + response = client.patch( + f"api/v1/flows/{flow_id}", json=updated_flow.dict(), headers=logged_in_headers + 
) assert response.status_code == 200 assert response.json()["name"] == updated_flow.name @@ -96,19 +107,23 @@ def test_update_flow(client: TestClient, json_flow: str): # assert response.json()["data"] == updated_flow.data -def test_delete_flow(client: TestClient, json_flow: str): - flow = json.loads(json_flow) +def test_delete_flow( + client: TestClient, json_flow: str, active_user, logged_in_headers +): + flow = orjson.loads(json_flow) data = flow["data"] flow = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow.dict()) + response = client.post("api/v1/flows/", json=flow.dict(), headers=logged_in_headers) flow_id = response.json()["id"] - response = client.delete(f"api/v1/flows/{flow_id}") + response = client.delete(f"api/v1/flows/{flow_id}", headers=logged_in_headers) assert response.status_code == 200 assert response.json()["message"] == "Flow deleted successfully" -def test_create_flows(client: TestClient, session: Session, json_flow: str): - flow = json.loads(json_flow) +def test_create_flows( + client: TestClient, session: Session, json_flow: str, logged_in_headers +): + flow = orjson.loads(json_flow) data = flow["data"] # Create test data flow_list = FlowListCreate( @@ -118,7 +133,9 @@ def test_create_flows(client: TestClient, session: Session, json_flow: str): ] ) # Make request to endpoint - response = client.post("api/v1/flows/batch/", json=flow_list.dict()) + response = client.post( + "api/v1/flows/batch/", json=flow_list.dict(), headers=logged_in_headers + ) # Check response status code assert response.status_code == 201 # Check response data @@ -132,8 +149,10 @@ def test_create_flows(client: TestClient, session: Session, json_flow: str): assert response_data[1]["data"] == data -def test_upload_file(client: TestClient, session: Session, json_flow: str): - flow = json.loads(json_flow) +def test_upload_file( + client: TestClient, session: Session, json_flow: str, logged_in_headers +): + 
flow = orjson.loads(json_flow) data = flow["data"] # Create test data flow_list = FlowListCreate( @@ -142,10 +161,11 @@ def test_upload_file(client: TestClient, session: Session, json_flow: str): FlowCreate(name="Flow 2", description="description", data=data), ] ) - file_contents = json.dumps(flow_list.dict()) + file_contents = orjson_dumps(flow_list.dict()) response = client.post( "api/v1/flows/upload/", files={"file": ("examples.json", file_contents, "application/json")}, + headers=logged_in_headers, ) # Check response status code assert response.status_code == 201 @@ -160,8 +180,14 @@ def test_upload_file(client: TestClient, session: Session, json_flow: str): assert response_data[1]["data"] == data -def test_download_file(client: TestClient, session: Session, json_flow): - flow = json.loads(json_flow) +def test_download_file( + client: TestClient, + session: Session, + json_flow, + active_user, + logged_in_headers, +): + flow = orjson.loads(json_flow) data = flow["data"] # Create test data flow_list = FlowListCreate( @@ -170,17 +196,20 @@ def test_download_file(client: TestClient, session: Session, json_flow): FlowCreate(name="Flow 2", description="description", data=data), ] ) - for flow in flow_list.flows: - db_flow = Flow.from_orm(flow) - session.add(db_flow) - session.commit() + db_manager = get_db_service() + with session_getter(db_manager) as session: + for flow in flow_list.flows: + flow.user_id = active_user.id + db_flow = Flow.from_orm(flow) + session.add(db_flow) + session.commit() # Make request to endpoint - response = client.get("api/v1/flows/download/") + response = client.get("api/v1/flows/download/", headers=logged_in_headers) # Check response status code - assert response.status_code == 200 + assert response.status_code == 200, response.json() # Check response data response_data = response.json()["flows"] - assert len(response_data) == 2 + assert len(response_data) == 2, response_data assert response_data[0]["name"] == "Flow 1" assert 
response_data[0]["description"] == "description" assert response_data[0]["data"] == data @@ -189,32 +218,44 @@ def test_download_file(client: TestClient, session: Session, json_flow): assert response_data[1]["data"] == data -def test_create_flow_with_invalid_data(client: TestClient): +def test_create_flow_with_invalid_data( + client: TestClient, active_user, logged_in_headers +): flow = {"name": "a" * 256, "data": "Invalid flow data"} - response = client.post("api/v1/flows/", json=flow) + response = client.post("api/v1/flows/", json=flow, headers=logged_in_headers) assert response.status_code == 422 -def test_get_nonexistent_flow(client: TestClient): +def test_get_nonexistent_flow(client: TestClient, active_user, logged_in_headers): uuid = uuid4() - response = client.get(f"api/v1/flows/{uuid}") + response = client.get(f"api/v1/flows/{uuid}", headers=logged_in_headers) assert response.status_code == 404 -def test_update_flow_idempotency(client: TestClient, json_flow: str): - flow_data = json.loads(json_flow) +def test_update_flow_idempotency( + client: TestClient, json_flow: str, active_user, logged_in_headers +): + flow_data = orjson.loads(json_flow) data = flow_data["data"] flow_data = FlowCreate(name="Test Flow", description="description", data=data) - response = client.post("api/v1/flows/", json=flow_data.dict()) + response = client.post( + "api/v1/flows/", json=flow_data.dict(), headers=logged_in_headers + ) flow_id = response.json()["id"] updated_flow = FlowCreate(name="Updated Flow", description="description", data=data) - response1 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.dict()) - response2 = client.put(f"api/v1/flows/{flow_id}", json=updated_flow.dict()) + response1 = client.put( + f"api/v1/flows/{flow_id}", json=updated_flow.dict(), headers=logged_in_headers + ) + response2 = client.put( + f"api/v1/flows/{flow_id}", json=updated_flow.dict(), headers=logged_in_headers + ) assert response1.json() == response2.json() -def 
test_update_nonexistent_flow(client: TestClient, json_flow: str): - flow_data = json.loads(json_flow) +def test_update_nonexistent_flow( + client: TestClient, json_flow: str, active_user, logged_in_headers +): + flow_data = orjson.loads(json_flow) data = flow_data["data"] uuid = uuid4() updated_flow = FlowCreate( @@ -222,17 +263,19 @@ def test_update_nonexistent_flow(client: TestClient, json_flow: str): description="description", data=data, ) - response = client.patch(f"api/v1/flows/{uuid}", json=updated_flow.dict()) + response = client.patch( + f"api/v1/flows/{uuid}", json=updated_flow.dict(), headers=logged_in_headers + ) assert response.status_code == 404 -def test_delete_nonexistent_flow(client: TestClient): +def test_delete_nonexistent_flow(client: TestClient, active_user, logged_in_headers): uuid = uuid4() - response = client.delete(f"api/v1/flows/{uuid}") + response = client.delete(f"api/v1/flows/{uuid}", headers=logged_in_headers) assert response.status_code == 404 -def test_read_empty_flows(client: TestClient): - response = client.get("api/v1/flows/") +def test_read_empty_flows(client: TestClient, active_user, logged_in_headers): + response = client.get("api/v1/flows/", headers=logged_in_headers) assert response.status_code == 200 assert len(response.json()) == 0 diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py index 045af1ba5..1de7c9deb 100644 --- a/tests/test_endpoints.py +++ b/tests/test_endpoints.py @@ -1,8 +1,44 @@ +from collections import namedtuple +import uuid +from langflow.processing.process import Result +from langflow.services.auth.utils import get_password_hash +from langflow.services.database.models.api_key.api_key import ApiKey +from langflow.services.getters import get_settings_service +from langflow.services.database.utils import session_getter +from langflow.services.getters import get_db_service import pytest from fastapi.testclient import TestClient from langflow.interface.tools.constants import CUSTOM_TOOLS from 
langflow.template.frontend_node.chains import TimeTravelGuideChainNode +import time + + +def run_post(client, flow_id, headers, post_data): + response = client.post( + f"api/v1/process/{flow_id}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + return response.json() + + +# Helper function to poll task status +def poll_task_status(client, headers, href, max_attempts=20, sleep_time=1): + for _ in range(max_attempts): + task_status_response = client.get( + href, + headers=headers, + ) + if ( + task_status_response.status_code == 200 + and task_status_response.json()["status"] == "SUCCESS" + ): + return task_status_response.json() + time.sleep(sleep_time) + return None # Return None if task did not complete in time + PROMPT_REQUEST = { "name": "string", @@ -83,8 +119,186 @@ PROMPT_REQUEST = { } -def test_get_all(client: TestClient): - response = client.get("api/v1/all") +@pytest.fixture +def created_api_key(active_user): + hashed = get_password_hash("random_key") + api_key = ApiKey( + name="test_api_key", + user_id=active_user.id, + api_key="random_key", + hashed_api_key=hashed, + ) + db_manager = get_db_service() + with session_getter(db_manager) as session: + if ( + existing_api_key := session.query(ApiKey) + .filter(ApiKey.api_key == api_key.api_key) + .first() + ): + return existing_api_key + session.add(api_key) + session.commit() + session.refresh(api_key) + return api_key + + +def test_process_flow_invalid_api_key(client, flow, monkeypatch): + # Mock de process_graph_cached + from langflow.api.v1 import endpoints + from langflow.services.database.models.api_key import crud + + settings_service = get_settings_service() + settings_service.auth_settings.AUTO_LOGIN = False + + async def mock_process_graph_cached(*args, **kwargs): + return Result(result={}, session_id="session_id_mock") + + def mock_update_total_uses(*args, **kwargs): + return created_api_key + + monkeypatch.setattr(endpoints, "process_graph_cached", 
mock_process_graph_cached) + monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) + + headers = {"x-api-key": "invalid_api_key"} + + post_data = { + "inputs": {"key": "value"}, + "tweaks": None, + "clear_cache": False, + "session_id": None, + } + + response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) + + assert response.status_code == 403 + assert response.json() == {"detail": "Invalid or missing API key"} + + +def test_process_flow_invalid_id(client, monkeypatch, created_api_key): + async def mock_process_graph_cached(*args, **kwargs): + return Result(result={}, session_id="session_id_mock") + + from langflow.api.v1 import endpoints + + monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) + + api_key = created_api_key.api_key + headers = {"x-api-key": api_key} + + post_data = { + "inputs": {"key": "value"}, + "tweaks": None, + "clear_cache": False, + "session_id": None, + } + + invalid_id = uuid.uuid4() + response = client.post( + f"api/v1/process/{invalid_id}", headers=headers, json=post_data + ) + + assert response.status_code == 404 + assert f"Flow {invalid_id} not found" in response.json()["detail"] + + +def test_process_flow_without_autologin(client, flow, monkeypatch, created_api_key): + # Mock de process_graph_cached + from langflow.api.v1 import endpoints + from langflow.services.database.models.api_key import crud + + settings_service = get_settings_service() + settings_service.auth_settings.AUTO_LOGIN = False + + async def mock_process_graph_cached(*args, **kwargs): + return Result(result={}, session_id="session_id_mock") + + def mock_process_graph_cached_task(*args, **kwargs): + return Result(result={}, session_id="session_id_mock") + + # The task function is ran like this: + # if not self.use_celery: + # return None, await task_func(*args, **kwargs) + # if not hasattr(task_func, "apply"): + # raise ValueError(f"Task function {task_func} does not have an apply method") + # 
task = task_func.apply(args=args, kwargs=kwargs) + # result = task.get() + # return task.id, result + # So we need to mock the task function to return a task object + # and then mock the task object to return a result + # maybe a named tuple would be better here + task = namedtuple("task", ["id", "get"]) + mock_process_graph_cached_task.apply = lambda *args, **kwargs: task( + id="task_id_mock", get=lambda: Result(result={}, session_id="session_id_mock") + ) + + def mock_update_total_uses(*args, **kwargs): + return created_api_key + + monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) + monkeypatch.setattr(crud, "update_total_uses", mock_update_total_uses) + monkeypatch.setattr( + endpoints, "process_graph_cached_task", mock_process_graph_cached_task + ) + + api_key = created_api_key.api_key + headers = {"x-api-key": api_key} + + # Dummy POST data + post_data = { + "inputs": {"input": "value"}, + "tweaks": None, + "clear_cache": False, + "session_id": None, + } + + # Make the request to the FastAPI TestClient + + response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) + + # Check the response + assert response.status_code == 200, response.json() + assert response.json()["result"] == {}, response.json() + assert response.json()["session_id"] == "session_id_mock", response.json() + + +def test_process_flow_fails_autologin_off(client, flow, monkeypatch): + # Mock de process_graph_cached + from langflow.api.v1 import endpoints + from langflow.services.database.models.api_key import crud + + settings_service = get_settings_service() + settings_service.auth_settings.AUTO_LOGIN = False + + async def mock_process_graph_cached(*args, **kwargs): + return Result(result={}, session_id="session_id_mock") + + async def mock_update_total_uses(*args, **kwargs): + return created_api_key + + monkeypatch.setattr(endpoints, "process_graph_cached", mock_process_graph_cached) + monkeypatch.setattr(crud, "update_total_uses", 
mock_update_total_uses) + + headers = {"x-api-key": "api_key"} + + # Dummy POST data + post_data = { + "inputs": {"key": "value"}, + "tweaks": None, + "clear_cache": False, + "session_id": None, + } + + # Make the request to the FastAPI TestClient + + response = client.post(f"api/v1/process/{flow.id}", headers=headers, json=post_data) + + # Check the response + assert response.status_code == 403, response.json() + assert response.json() == {"detail": "Invalid or missing API key"} + + +def test_get_all(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() # We need to test the custom nodes @@ -206,3 +420,180 @@ def test_various_prompts(client, prompt, expected_input_variables): response = client.post("api/v1/validate/prompt", json=PROMPT_REQUEST) assert response.status_code == 200 assert response.json()["input_variables"] == expected_input_variables + + +def test_basic_chat_in_process(client, added_flow, created_api_key): + # Run the /api/v1/process/{flow_id} endpoint + headers = {"x-api-key": created_api_key.api_key} + post_data = {"inputs": {"text": "Hi, My name is Gabriel"}} + response = client.post( + f"api/v1/process/{added_flow.get('id')}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + # Check the response + assert "Gabriel" in response.json()["result"]["text"] + # session_id should be returned + assert "session_id" in response.json() + assert response.json()["session_id"] is not None + # New request with the same session_id + # asking "What is my name?" 
should return "Gabriel" + post_data = { + "inputs": {"text": "What is my name?"}, + "session_id": response.json()["session_id"], + } + response = client.post( + f"api/v1/process/{added_flow.get('id')}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + assert "Gabriel" in response.json()["result"]["text"] + + +def test_basic_chat_different_session_ids(client, added_flow, created_api_key): + # Run the /api/v1/process/{flow_id} endpoint + headers = {"x-api-key": created_api_key.api_key} + post_data = {"inputs": {"text": "Hi, My name is Gabriel"}} + response = client.post( + f"api/v1/process/{added_flow.get('id')}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + # Check the response + assert "Gabriel" in response.json()["result"]["text"] + # session_id should be returned + assert "session_id" in response.json() + assert response.json()["session_id"] is not None + session_id1 = response.json()["session_id"] + # New request with a different session_id + # asking "What is my name?" 
should return "Gabriel" + post_data = { + "inputs": {"text": "What is my name?"}, + } + response = client.post( + f"api/v1/process/{added_flow.get('id')}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + assert "Gabriel" not in response.json()["result"]["text"] + assert session_id1 != response.json()["session_id"] + + +def test_basic_chat_with_two_session_ids_and_names(client, added_flow, created_api_key): + headers = {"x-api-key": created_api_key.api_key} + flow_id = added_flow.get("id") + names = ["Gabriel", "John"] + session_ids = [] + + for name in names: + post_data = {"inputs": {"text": f"Hi, My name is {name}"}} + response_json = run_post(client, flow_id, headers, post_data) + + assert name in response_json["result"]["text"] + assert "session_id" in response_json + assert response_json["session_id"] is not None + + session_ids.append(response_json["session_id"]) + + for i, name in enumerate(names): + post_data = { + "inputs": {"text": "What is my name?"}, + "session_id": session_ids[i], + } + response_json = run_post(client, flow_id, headers, post_data) + + assert name in response_json["result"]["text"] + + +@pytest.mark.async_test +def test_vector_store_in_process( + distributed_client, added_vector_store, created_api_key +): + # Run the /api/v1/process/{flow_id} endpoint + headers = {"x-api-key": created_api_key.api_key} + post_data = {"inputs": {"input": "What is Langflow?"}} + response = distributed_client.post( + f"api/v1/process/{added_vector_store.get('id')}", + headers=headers, + json=post_data, + ) + assert response.status_code == 200, response.json() + # Check the response + assert "Langflow" in response.json()["result"]["output"] + # session_id should be returned + assert "session_id" in response.json() + assert response.json()["session_id"] is not None + + +# Test function without loop +@pytest.mark.async_test +def test_async_task_processing(distributed_client, added_flow, created_api_key): + 
headers = {"x-api-key": created_api_key.api_key} + post_data = {"inputs": {"text": "Hi, My name is Gabriel"}} + + # Run the /api/v1/process/{flow_id} endpoint with sync=False + response = distributed_client.post( + f"api/v1/process/{added_flow.get('id')}", + headers=headers, + json={**post_data, "sync": False}, + ) + assert response.status_code == 200, response.json() + + # Extract the task ID from the response + task = response.json().get("task") + task_id = task.get("id") + task_href = task.get("href") + assert task_id is not None + assert task_href is not None + assert task_href == f"api/v1/task/{task_id}" + + # Polling the task status using the helper function + task_status_json = poll_task_status(distributed_client, headers, task_href) + assert task_status_json is not None, "Task did not complete in time" + + # Validate that the task completed successfully and the result is as expected + assert "result" in task_status_json, task_status_json + assert "text" in task_status_json["result"], task_status_json["result"] + assert "Gabriel" in task_status_json["result"]["text"], task_status_json["result"] + + +# Test function without loop +@pytest.mark.async_test +def test_async_task_processing_vector_store( + client, added_vector_store, created_api_key +): + headers = {"x-api-key": created_api_key.api_key} + post_data = {"inputs": {"input": "How do I upload examples?"}} + + # Run the /api/v1/process/{flow_id} endpoint with sync=False + response = client.post( + f"api/v1/process/{added_vector_store.get('id')}", + headers=headers, + json={**post_data, "sync": False}, + ) + assert response.status_code == 200, response.json() + assert "result" in response.json() + assert "FAILURE" not in response.json()["result"] + + # Extract the task ID from the response + task = response.json().get("task") + task_id = task.get("id") + task_href = task.get("href") + assert task_id is not None + assert task_href is not None + assert task_href == f"api/v1/task/{task_id}" + + # Polling the 
task status using the helper function + task_status_json = poll_task_status(client, headers, task_href) + assert task_status_json is not None, "Task did not complete in time" + + # Validate that the task completed successfully and the result is as expected + assert "result" in task_status_json, task_status_json + assert "output" in task_status_json["result"], task_status_json["result"] + assert "Langflow" in task_status_json["result"]["output"], task_status_json[ + "result" + ] diff --git a/tests/test_graph.py b/tests/test_graph.py index f3efe3614..fdb249eda 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -1,9 +1,11 @@ +import json import os from pathlib import Path +import pickle from typing import Type, Union from langflow.graph.edge.base import Edge from langflow.graph.vertex.base import Vertex - +from langchain.agents import AgentExecutor import pytest from langchain.chains.base import Chain from langchain.llms.fake import FakeListLLM @@ -185,7 +187,7 @@ def test_build_edges(basic_graph): assert isinstance(edge.target, Vertex) -def test_get_root_node(basic_graph, complex_graph): +def test_get_root_node(client, basic_graph, complex_graph): """Test getting root node""" assert isinstance(basic_graph, Graph) root = get_root_node(basic_graph) @@ -261,7 +263,7 @@ def test_llm_node_build(basic_graph): assert built_object is not None -def test_toolkit_node_build(openapi_graph): +def test_toolkit_node_build(client, openapi_graph): # Write a file to the disk file_path = "api-with-examples.yaml" with open(file_path, "w") as f: @@ -276,7 +278,7 @@ def test_toolkit_node_build(openapi_graph): assert not Path(file_path).exists() -def test_file_tool_node_build(openapi_graph): +def test_file_tool_node_build(client, openapi_graph): file_path = "api-with-examples.yaml" with open(file_path, "w") as f: f.write("openapi: 3.0.0") @@ -318,3 +320,29 @@ def test_get_result_and_thought(basic_graph): # Get the result and thought result = 
get_result_and_thought(langchain_object, message) assert isinstance(result, dict) + + +def test_pickle_graph(json_vector_store): + loaded_json = json.loads(json_vector_store) + graph = Graph.from_payload(loaded_json) + assert isinstance(graph, Graph) + first_result = graph.build() + assert isinstance(first_result, AgentExecutor) + pickled = pickle.dumps(graph) + assert pickled is not None + unpickled = pickle.loads(pickled) + assert unpickled is not None + result = unpickled.build() + assert isinstance(result, AgentExecutor) + + +def test_pickle_each_vertex(json_vector_store): + loaded_json = json.loads(json_vector_store) + graph = Graph.from_payload(loaded_json) + assert isinstance(graph, Graph) + for vertex in graph.nodes: + vertex.build() + pickled = pickle.dumps(vertex) + assert pickled is not None + unpickled = pickle.loads(pickled) + assert unpickled is not None diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py index 5c494766e..0a30a825e 100644 --- a/tests/test_llms_template.py +++ b/tests/test_llms_template.py @@ -1,110 +1,8 @@ from fastapi.testclient import TestClient -from langflow.services.utils import get_settings_manager -def test_llms_settings(client: TestClient): - settings_manager = get_settings_manager() - response = client.get("api/v1/all") - assert response.status_code == 200 - json_response = response.json() - llms = json_response["llms"] - assert set(llms.keys()) == set(settings_manager.settings.LLMS) - - -# def test_hugging_face_hub(client: TestClient): -# response = client.get("api/v1/all") -# assert response.status_code == 200 -# json_response = response.json() -# language_models = json_response["llms"] - -# model = language_models["HuggingFaceHub"] -# template = model["template"] - -# assert template["cache"] == { -# "required": False, -# "placeholder": "", -# "show": False, -# "multiline": False, -# "password": False, -# "name": "cache", -# "type": "bool", -# "list": False, -# "advanced": False, -# } -# assert 
template["verbose"] == { -# "required": False, -# "placeholder": "", -# "show": False, -# "multiline": False, -# "value": False, -# "password": False, -# "name": "verbose", -# "type": "bool", -# "list": False, -# "advanced": False, -# } -# assert template["client"] == { -# "required": False, -# "placeholder": "", -# "show": False, -# "multiline": False, -# "password": False, -# "name": "client", -# "type": "Any", -# "list": False, -# "advanced": False, -# } -# assert template["repo_id"] == { -# "required": False, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "value": "gpt2", -# "password": False, -# "name": "repo_id", -# "type": "str", -# "list": False, -# "advanced": False, -# } -# assert template["task"] == { -# "required": True, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "password": False, -# "options": ["text-generation", "text2text-generation"], -# "name": "task", -# "type": "str", -# "list": True, -# "advanced": True, -# } -# assert template["model_kwargs"] == { -# "required": False, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "password": False, -# "name": "model_kwargs", -# "type": "code", -# "list": False, -# "advanced": True, -# } -# assert template["huggingfacehub_api_token"] == { -# "required": False, -# "placeholder": "", -# "show": True, -# "multiline": False, -# "password": True, -# "name": "huggingfacehub_api_token", -# "display_name": "HuggingFace Hub API Token", -# "type": "str", -# "list": False, -# "advanced": False, -# } - - -def test_openai(client: TestClient): - response = client.get("api/v1/all") +def test_openai(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() language_models = json_response["llms"] @@ -279,7 +177,7 @@ def test_openai(client: TestClient): "multiline": False, "password": False, "name": "model_kwargs", - "type": "code", + "type": "dict", "list": 
False, "advanced": True, "info": "", @@ -334,7 +232,7 @@ def test_openai(client: TestClient): "multiline": False, "password": False, "name": "logit_bias", - "type": "code", + "type": "dict", "list": False, "advanced": False, "info": "", @@ -369,8 +267,8 @@ def test_openai(client: TestClient): } -def test_chat_open_ai(client: TestClient): - response = client.get("api/v1/all") +def test_chat_open_ai(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() language_models = json_response["llms"] @@ -451,7 +349,7 @@ def test_chat_open_ai(client: TestClient): "multiline": False, "password": False, "name": "model_kwargs", - "type": "code", + "type": "dict", "list": False, "advanced": True, "info": "", diff --git a/tests/test_loading.py b/tests/test_loading.py index 11fa8e471..e5c409c93 100644 --- a/tests/test_loading.py +++ b/tests/test_loading.py @@ -1,5 +1,4 @@ import json - import pytest from langchain.chains.base import Chain from langflow.processing.process import load_flow_from_json diff --git a/tests/test_login.py b/tests/test_login.py new file mode 100644 index 000000000..f505f4100 --- /dev/null +++ b/tests/test_login.py @@ -0,0 +1,50 @@ +from langflow.services.database.utils import session_getter +from langflow.services.getters import get_db_service +import pytest +from langflow.services.database.models.user import User +from langflow.services.auth.utils import get_password_hash + + +@pytest.fixture +def test_user(): + return User( + username="testuser", + password=get_password_hash( + "testpassword" + ), # Assuming password needs to be hashed + is_active=True, + is_superuser=False, + ) + + +def test_login_successful(client, test_user): + # Adding the test user to the database + with session_getter(get_db_service()) as session: + session.add(test_user) + session.commit() + + response = client.post( + "api/v1/login", data={"username": "testuser", 
"password": "testpassword"} + ) + assert response.status_code == 200 + assert "access_token" in response.json() + + +def test_login_unsuccessful_wrong_username(client): + response = client.post( + "api/v1/login", data={"username": "wrongusername", "password": "testpassword"} + ) + assert response.status_code == 401 + assert response.json()["detail"] == "Incorrect username or password" + + +def test_login_unsuccessful_wrong_password(client, test_user, session): + # Adding the test user to the database + session.add(test_user) + session.commit() + + response = client.post( + "api/v1/login", data={"username": "testuser", "password": "wrongpassword"} + ) + assert response.status_code == 401 + assert response.json()["detail"] == "Incorrect username or password" diff --git a/tests/test_process.py b/tests/test_process.py index a0d91b5df..0588800dc 100644 --- a/tests/test_process.py +++ b/tests/test_process.py @@ -1,5 +1,5 @@ -from langflow.interface.run import build_sorted_vertices_with_caching -from langflow.processing.process import load_langchain_object, process_tweaks +from langflow.processing.process import process_tweaks +from langflow.services.getters import get_session_service def test_no_tweaks(): @@ -198,51 +198,38 @@ def test_tweak_not_in_template(): def test_load_langchain_object_with_cached_session(client, basic_graph_data): - # Build the langchain_object once and get the session_id - langchain_object1, artifacts1, session_id1 = load_langchain_object( - basic_graph_data, None - ) - # Use the same session_id to get the langchain_object again - langchain_object2, artifacts2, session_id2 = load_langchain_object( - basic_graph_data, session_id1 - ) + # Provide a non-existent session_id + session_service = get_session_service() + session_id1 = "non-existent-session-id" + graph1, artifacts1 = session_service.load_session(session_id1, basic_graph_data) + # Use the new session_id to get the langchain_object again + graph2, artifacts2 = 
session_service.load_session(session_id1, basic_graph_data) - assert session_id1 == session_id2 - assert id(langchain_object1) == id(langchain_object2) + assert graph1 == graph2 assert artifacts1 == artifacts2 def test_load_langchain_object_with_no_cached_session(client, basic_graph_data): # Provide a non-existent session_id - langchain_object1, artifacts1, session_id1 = load_langchain_object( - basic_graph_data, "non_existent_session" - ) + session_service = get_session_service() + session_id1 = "non-existent-session-id" + session_id = session_service.build_key(session_id1, basic_graph_data) + graph1, artifacts1 = session_service.load_session(session_id, basic_graph_data) # Clear the cache - build_sorted_vertices_with_caching.clear_cache() + session_service.clear_session(session_id) # Use the new session_id to get the langchain_object again - langchain_object2, artifacts2, session_id2 = load_langchain_object( - basic_graph_data, session_id1 - ) + graph2, artifacts2 = session_service.load_session(session_id, basic_graph_data) - assert session_id1 == session_id2 - assert id(langchain_object1) != id( - langchain_object2 - ) # Since the cache was cleared, objects should be different + assert id(graph1) != id(graph2) + # Since the cache was cleared, objects should be different def test_load_langchain_object_without_session_id(client, basic_graph_data): - # Build the langchain_object without providing a session_id - langchain_object1, artifacts1, session_id1 = load_langchain_object( - basic_graph_data, None - ) - # Build the langchain_object again without providing a session_id - langchain_object2, artifacts2, session_id2 = load_langchain_object( - basic_graph_data, None - ) + # Provide a non-existent session_id + session_service = get_session_service() + session_id1 = None + graph1, artifacts1 = session_service.load_session(session_id1, basic_graph_data) + # Use the new session_id to get the langchain_object again + graph2, artifacts2 = 
session_service.load_session(session_id1, basic_graph_data) - assert session_id1 == session_id2 - - assert id(langchain_object1) == id( - langchain_object2 - ) # Since no session_id was provided, the hash will be based on the graph_data - assert artifacts1 == artifacts2 + assert graph1 == graph2 diff --git a/tests/test_prompts_template.py b/tests/test_prompts_template.py index dde313c20..ae9c1b4f6 100644 --- a/tests/test_prompts_template.py +++ b/tests/test_prompts_template.py @@ -1,18 +1,18 @@ from fastapi.testclient import TestClient -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service -def test_prompts_settings(client: TestClient): - settings_manager = get_settings_manager() - response = client.get("api/v1/all") +def test_prompts_settings(client: TestClient, logged_in_headers): + settings_service = get_settings_service() + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] - assert set(prompts.keys()) == set(settings_manager.settings.PROMPTS) + assert set(prompts.keys()) == set(settings_service.settings.PROMPTS) -def test_prompt_template(client: TestClient): - response = client.get("api/v1/all") +def test_prompt_template(client: TestClient, logged_in_headers): + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() prompts = json_response["prompts"] @@ -55,7 +55,7 @@ def test_prompt_template(client: TestClient): "multiline": False, "password": False, "name": "partial_variables", - "type": "code", + "type": "dict", "list": False, "advanced": False, "info": "", diff --git a/tests/test_setup_superuser.py b/tests/test_setup_superuser.py new file mode 100644 index 000000000..6cf4b3a3a --- /dev/null +++ b/tests/test_setup_superuser.py @@ -0,0 +1,143 @@ +from unittest.mock import patch, Mock, MagicMock, call +from 
langflow.services.database.models.user.user import User +from langflow.services.settings.constants import ( + DEFAULT_SUPERUSER, + DEFAULT_SUPERUSER_PASSWORD, +) +from langflow.services.utils import ( + setup_superuser, + teardown_superuser, +) + + +@patch("langflow.services.getters.get_settings_service") +@patch("langflow.services.utils.create_super_user") +@patch("langflow.services.getters.get_session") +def test_setup_superuser( + mock_get_session, mock_create_super_user, mock_get_settings_service +): + # Test when AUTO_LOGIN is True + calls = [] + mock_settings_service = Mock() + mock_settings_service.auth_settings.AUTO_LOGIN = True + mock_settings_service.auth_settings.SUPERUSER = DEFAULT_SUPERUSER + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = DEFAULT_SUPERUSER_PASSWORD + mock_get_settings_service.return_value = mock_settings_service + mock_session = Mock() + mock_session.query.return_value.filter.return_value.first.return_value = ( + mock_session + ) + # return value of get_session is a generator + mock_get_session.return_value = iter([mock_session, mock_session, mock_session]) + setup_superuser(mock_settings_service, mock_session) + mock_session.query.assert_called_once_with(User) + actual_expr = mock_session.query.return_value.filter.call_args[0][0] + expected_expr = User.username == DEFAULT_SUPERUSER + + assert str(actual_expr) == str(expected_expr) + create_call = call( + db=mock_session, username=DEFAULT_SUPERUSER, password=DEFAULT_SUPERUSER_PASSWORD + ) + calls.append(create_call) + # mock_create_super_user.assert_has_calls(calls) + assert 1 == mock_create_super_user.call_count + + def reset_mock_credentials(): + mock_settings_service.auth_settings.SUPERUSER = DEFAULT_SUPERUSER + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = ( + DEFAULT_SUPERUSER_PASSWORD + ) + + ADMIN_USER_NAME = "admin_user" + # Test when username and password are default + mock_settings_service.auth_settings = Mock() + 
mock_settings_service.auth_settings.AUTO_LOGIN = False + mock_settings_service.auth_settings.SUPERUSER = ADMIN_USER_NAME + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = "password" + mock_settings_service.auth_settings.reset_credentials = Mock( + side_effect=reset_mock_credentials + ) + + mock_get_settings_service.return_value = mock_settings_service + + setup_superuser(mock_settings_service, mock_session) + mock_session.query.assert_called_with(User) + actual_expr = mock_session.query.return_value.filter.call_args[0][0] + expected_expr = User.username == ADMIN_USER_NAME + + assert str(actual_expr) == str(expected_expr) + create_call = call(db=mock_session, username=ADMIN_USER_NAME, password="password") + calls.append(create_call) + # mock_create_super_user.assert_has_calls(calls) + assert 2 == mock_create_super_user.call_count + # Test that superuser credentials are reset + mock_settings_service.auth_settings.reset_credentials.assert_called_once() + assert mock_settings_service.auth_settings.SUPERUSER != ADMIN_USER_NAME + assert mock_settings_service.auth_settings.SUPERUSER_PASSWORD != "password" + + # Test when superuser already exists + mock_settings_service.auth_settings.AUTO_LOGIN = False + mock_settings_service.auth_settings.SUPERUSER = ADMIN_USER_NAME + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = "password" + mock_user = Mock() + mock_user.is_superuser = True + mock_session.query.return_value.filter.return_value.first.return_value = mock_user + setup_superuser(mock_settings_service, mock_session) + mock_session.query.assert_called_with(User) + actual_expr = mock_session.query.return_value.filter.call_args[0][0] + expected_expr = User.username == ADMIN_USER_NAME + + assert str(actual_expr) == str(expected_expr) + + +@patch("langflow.services.getters.get_settings_service") +@patch("langflow.services.getters.get_session") +def test_teardown_superuser_default_superuser( + mock_get_session, mock_get_settings_service +): + 
mock_settings_service = MagicMock() + mock_settings_service.auth_settings.AUTO_LOGIN = True + mock_settings_service.auth_settings.SUPERUSER = DEFAULT_SUPERUSER + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = DEFAULT_SUPERUSER_PASSWORD + mock_get_settings_service.return_value = mock_settings_service + + mock_session = MagicMock() + mock_user = MagicMock() + mock_user.is_superuser = True + mock_session.query.return_value.filter.return_value.first.return_value = mock_user + mock_get_session.return_value = iter([mock_session]) + + teardown_superuser(mock_settings_service, mock_session) + + mock_session.query.assert_called_once_with(User) + actual_expr = mock_session.query.return_value.filter.call_args[0][0] + expected_expr = User.username == DEFAULT_SUPERUSER + + assert str(actual_expr) == str(expected_expr) + mock_session.delete.assert_called_once_with(mock_user) + mock_session.commit.assert_called_once() + + +@patch("langflow.services.getters.get_settings_service") +@patch("langflow.services.getters.get_session") +def test_teardown_superuser_no_default_superuser( + mock_get_session, mock_get_settings_service +): + ADMIN_USER_NAME = "admin_user" + mock_settings_service = MagicMock() + mock_settings_service.auth_settings.AUTO_LOGIN = False + mock_settings_service.auth_settings.SUPERUSER = ADMIN_USER_NAME + mock_settings_service.auth_settings.SUPERUSER_PASSWORD = "password" + mock_get_settings_service.return_value = mock_settings_service + + mock_session = MagicMock() + mock_user = MagicMock() + mock_user.is_superuser = False + mock_session.query.return_value.filter.return_value.first.return_value = mock_user + mock_get_session.return_value = [mock_session] + + teardown_superuser(mock_settings_service, mock_session) + + mock_session.query.assert_not_called() + mock_session.delete.assert_not_called() + mock_session.commit.assert_not_called() diff --git a/tests/test_template.py b/tests/test_template.py index 4be6dff06..81f2a6020 100644 --- 
a/tests/test_template.py +++ b/tests/test_template.py @@ -135,7 +135,7 @@ def test_format_dict(): } expected_output = { "field1": { - "type": "code", # Mapping type is replaced with dict which is replaced with code + "type": "dict[str, int]", # Mapping type is replaced with dict which is replaced with code "required": False, "list": False, "show": False, @@ -249,7 +249,7 @@ def test_format_dict(): } expected_output = { "field1": { - "type": "code", + "type": "Dict[str, int]", "required": False, "list": False, "show": False, diff --git a/tests/test_user.py b/tests/test_user.py new file mode 100644 index 000000000..49962c8d1 --- /dev/null +++ b/tests/test_user.py @@ -0,0 +1,249 @@ +from datetime import datetime +from langflow.services.auth.utils import create_super_user, get_password_hash + +from langflow.services.database.models.user.user import User +from langflow.services.database.utils import session_getter +from langflow.services.getters import get_db_service, get_settings_service +import pytest +from langflow.services.database.models.user import UserUpdate + + +@pytest.fixture +def super_user(client): + settings_manager = get_settings_service() + auth_settings = settings_manager.auth_settings + with session_getter(get_db_service()) as session: + return create_super_user( + db=session, + username=auth_settings.SUPERUSER, + password=auth_settings.SUPERUSER_PASSWORD, + ) + + +@pytest.fixture +def super_user_headers(client, super_user): + settings_service = get_settings_service() + auth_settings = settings_service.auth_settings + login_data = { + "username": auth_settings.SUPERUSER, + "password": auth_settings.SUPERUSER_PASSWORD, + } + response = client.post("/api/v1/login", data=login_data) + assert response.status_code == 200 + tokens = response.json() + a_token = tokens["access_token"] + return {"Authorization": f"Bearer {a_token}"} + + +@pytest.fixture +def deactivated_user(): + with session_getter(get_db_service()) as session: + user = User( + 
username="deactivateduser", + password=get_password_hash("testpassword"), + is_active=False, + is_superuser=False, + last_login_at=datetime.now(), + ) + session.add(user) + session.commit() + session.refresh(user) + return user + + +def test_user_waiting_for_approval( + client, +): + # Create a user that is not active and has never logged in + with session_getter(get_db_service()) as session: + user = User( + username="waitingforapproval", + password=get_password_hash("testpassword"), + is_active=False, + last_login_at=None, + ) + session.add(user) + session.commit() + + login_data = {"username": "waitingforapproval", "password": "testpassword"} + response = client.post("/api/v1/login", data=login_data) + assert response.status_code == 400 + assert response.json()["detail"] == "Waiting for approval" + + +def test_deactivated_user_cannot_login(client, deactivated_user): + login_data = {"username": deactivated_user.username, "password": "testpassword"} + response = client.post("/api/v1/login", data=login_data) + assert response.status_code == 400, response.json() + assert response.json()["detail"] == "Inactive user" + + +def test_deactivated_user_cannot_access(client, deactivated_user, logged_in_headers): + # Assuming the headers for deactivated_user + response = client.get("/api/v1/users", headers=logged_in_headers) + assert response.status_code == 400, response.json() + assert response.json()["detail"] == "The user doesn't have enough privileges" + + +def test_data_consistency_after_update( + client, active_user, logged_in_headers, super_user_headers +): + user_id = active_user.id + update_data = UserUpdate(is_active=False) + + response = client.patch( + f"/api/v1/users/{user_id}", json=update_data.dict(), headers=super_user_headers + ) + assert response.status_code == 200, response.json() + + # Fetch the updated user from the database + response = client.get("/api/v1/users/whoami", headers=logged_in_headers) + assert response.status_code == 401, response.json() + 
assert response.json()["detail"] == "Could not validate credentials" + + +def test_data_consistency_after_delete(client, test_user, super_user_headers): + user_id = test_user.get("id") + response = client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) + assert response.status_code == 200, response.json() + + # Attempt to fetch the deleted user from the database + response = client.get("/api/v1/users", headers=super_user_headers) + assert response.status_code == 200 + assert all(user["id"] != user_id for user in response.json()["users"]) + + +def test_inactive_user(client): + # Create a user that is not active and has a last_login_at value + with session_getter(get_db_service()) as session: + user = User( + username="inactiveuser", + password=get_password_hash("testpassword"), + is_active=False, + last_login_at="2023-01-01T00:00:00", # Set to a valid datetime string + ) + session.add(user) + session.commit() + + login_data = {"username": "inactiveuser", "password": "testpassword"} + response = client.post("/api/v1/login", data=login_data) + assert response.status_code == 400 + assert response.json()["detail"] == "Inactive user" + + +def test_add_user(client, test_user): + assert test_user["username"] == "testuser" + + +# This is not used in the Frontend at the moment +# def test_read_current_user(client: TestClient, active_user): +# # First we need to login to get the access token +# login_data = {"username": "testuser", "password": "testpassword"} +# response = client.post("/api/v1/login", data=login_data) +# assert response.status_code == 200 + +# headers = {"Authorization": f"Bearer {response.json()['access_token']}"} + +# response = client.get("/api/v1/user", headers=headers) +# assert response.status_code == 200, response.json() +# assert response.json()["username"] == "testuser" + + +def test_read_all_users(client, super_user_headers): + response = client.get("/api/v1/users", headers=super_user_headers) + assert response.status_code == 200, 
response.json() + assert isinstance(response.json()["users"], list) + + +def test_normal_user_cant_read_all_users(client, logged_in_headers): + response = client.get("/api/v1/users", headers=logged_in_headers) + assert response.status_code == 400, response.json() + assert response.json() == {"detail": "The user doesn't have enough privileges"} + + +def test_patch_user(client, active_user, logged_in_headers): + user_id = active_user.id + update_data = UserUpdate( + username="newname", + ) + + response = client.patch( + f"/api/v1/users/{user_id}", json=update_data.dict(), headers=logged_in_headers + ) + assert response.status_code == 200, response.json() + update_data = UserUpdate( + profile_image="new_image", + ) + + response = client.patch( + f"/api/v1/users/{user_id}", json=update_data.dict(), headers=logged_in_headers + ) + assert response.status_code == 200, response.json() + + +def test_patch_reset_password(client, active_user, logged_in_headers): + user_id = active_user.id + update_data = UserUpdate( + password="newpassword", + ) + + response = client.patch( + f"/api/v1/users/{user_id}/reset-password", + json=update_data.dict(), + headers=logged_in_headers, + ) + assert response.status_code == 200, response.json() + # Now we need to test if the new password works + login_data = {"username": active_user.username, "password": "newpassword"} + response = client.post("/api/v1/login", data=login_data) + assert response.status_code == 200 + + +def test_patch_user_wrong_id(client, active_user, logged_in_headers): + user_id = "wrong_id" + update_data = UserUpdate( + username="newname", + ) + + response = client.patch( + f"/api/v1/users/{user_id}", json=update_data.dict(), headers=logged_in_headers + ) + assert response.status_code == 422, response.json() + assert response.json() == { + "detail": [ + { + "loc": ["path", "user_id"], + "msg": "value is not a valid uuid", + "type": "type_error.uuid", + } + ] + } + + +def test_delete_user(client, test_user, 
super_user_headers): + user_id = test_user["id"] + response = client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) + assert response.status_code == 200 + assert response.json() == {"detail": "User deleted"} + + +def test_delete_user_wrong_id(client, test_user, super_user_headers): + user_id = "wrong_id" + response = client.delete(f"/api/v1/users/{user_id}", headers=super_user_headers) + assert response.status_code == 422 + assert response.json() == { + "detail": [ + { + "loc": ["path", "user_id"], + "msg": "value is not a valid uuid", + "type": "type_error.uuid", + } + ] + } + + +def test_normal_user_cant_delete_user(client, test_user, logged_in_headers): + user_id = test_user["id"] + response = client.delete(f"/api/v1/users/{user_id}", headers=logged_in_headers) + assert response.status_code == 400 + assert response.json() == {"detail": "The user doesn't have enough privileges"} diff --git a/tests/test_vectorstore_template.py b/tests/test_vectorstore_template.py index 4baa7f4b6..9dd131dbc 100644 --- a/tests/test_vectorstore_template.py +++ b/tests/test_vectorstore_template.py @@ -1,14 +1,14 @@ from fastapi.testclient import TestClient -from langflow.services.utils import get_settings_manager +from langflow.services.getters import get_settings_service # check that all agents are in settings.agents # are in json_response["agents"] -def test_vectorstores_settings(client: TestClient): - settings_manager = get_settings_manager() - response = client.get("api/v1/all") +def test_vectorstores_settings(client: TestClient, logged_in_headers): + settings_service = get_settings_service() + response = client.get("api/v1/all", headers=logged_in_headers) assert response.status_code == 200 json_response = response.json() vectorstores = json_response["vectorstores"] - settings_vecs = set(settings_manager.settings.VECTORSTORES) + settings_vecs = set(settings_service.settings.VECTORSTORES) assert all(vs in vectorstores for vs in settings_vecs) diff --git 
a/tests/test_websocket.py b/tests/test_websocket.py index dd668c287..5016eb704 100644 --- a/tests/test_websocket.py +++ b/tests/test_websocket.py @@ -1,13 +1,16 @@ from fastapi import WebSocketDisconnect +from fastapi.testclient import TestClient -# from langflow.services.chat.manager import ChatManager +# from langflow.services.chat.manager import ChatService import pytest -def test_init_build(client): +def test_init_build(client, active_user, logged_in_headers): response = client.post( - "api/v1/build/init/test", json={"id": "test", "data": {"key": "value"}} + "api/v1/build/init/test", + json={"id": "test", "data": {"key": "value"}}, + headers=logged_in_headers, ) assert response.status_code == 201 assert response.json() == {"flowId": "test"} @@ -24,10 +27,12 @@ def test_init_build(client): # assert response.headers["content-type"] == "text/event-stream; charset=utf-8" -def test_websocket_endpoint(client): +def test_websocket_endpoint(client: TestClient, active_user, logged_in_headers): + # Assuming your websocket_endpoint uses chat_service which caches data from stream_build + access_token = logged_in_headers["Authorization"].split(" ")[1] with pytest.raises(WebSocketDisconnect): with client.websocket_connect( - "api/v1/chat/non_existing_client_id" + f"api/v1/chat/non_existing_client_id?token={access_token}" ) as websocket: websocket.send_json({"type": "test"}) data = websocket.receive_json() @@ -35,12 +40,12 @@ def test_websocket_endpoint(client): def test_websocket_endpoint_after_build(client, basic_graph_data): - # Assuming your websocket_endpoint uses chat_manager which caches data from stream_build + # Assuming your websocket_endpoint uses chat_service which caches data from stream_build client.post("api/v1/build/init", json=basic_graph_data) client.get("api/v1/build/stream/websocket_test") # There should be more to test here, but it depends on the inner workings of your websocket handler - # and how your chat_manager and other classes behave. 
The following is just an example structure. + # and how your chat_service and other classes behave. The following is just an example structure. with pytest.raises(WebSocketDisconnect): with client.websocket_connect("api/v1/chat/websocket_test") as websocket: websocket.send_json({"input": "test"}) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 000000000..e69de29bb