diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 000000000..c2c42d27d
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,28 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**Browser and Version**
+ - Browser [e.g. chrome, safari]
+ - Version [e.g. 22]
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 5a94a09e7..a807555c4 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -6,7 +6,7 @@ on:
pull_request:
env:
- POETRY_VERSION: "1.3.1"
+ POETRY_VERSION: "1.4.0"
jobs:
build:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index e48b80203..3955f2bb5 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -10,7 +10,7 @@ on:
- "pyproject.toml"
env:
- POETRY_VERSION: "1.3.1"
+ POETRY_VERSION: "1.4.0"
jobs:
if_release:
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 000000000..52109f944
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,33 @@
+name: test
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [dev]
+
+env:
+ POETRY_VERSION: "1.4.0"
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version:
+ - "3.10"
+ - "3.11"
+ steps:
+ - uses: actions/checkout@v3
+ - name: Install poetry
+ run: pipx install poetry==$POETRY_VERSION
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
+ with:
+ python-version: ${{ matrix.python-version }}
+ cache: "poetry"
+ - name: Install dependencies
+ run: poetry install
+ - name: Run unit tests
+ run: |
+ make test
diff --git a/.gitignore b/.gitignore
index 96512b650..c2ffb8276 100644
--- a/.gitignore
+++ b/.gitignore
@@ -9,6 +9,11 @@ lerna-debug.log*
# Mac
.DS_Store
+# VSCode
+.vscode
+.chroma
+.ruff_cache
+
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
@@ -233,5 +238,5 @@ venv.bak/
.dmypy.json
dmypy.json
-# Poetry
-.testenv/*
\ No newline at end of file
+# Poetry
+.testenv/*
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 98dddaa84..f98336ba9 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,6 +1,6 @@
# Contributing to LangFlow
-Hello there! I appreciate your interest in contributing to LangFlow.
+Hello there! We appreciate your interest in contributing to LangFlow.
As an open-source project in a rapidly developing field, we are extremely open
to contributions, whether it be in the form of a new feature, improved infra, or better documentation.
diff --git a/GCP_DEPLOYMENT.md b/GCP_DEPLOYMENT.md
new file mode 100644
index 000000000..edb7e043c
--- /dev/null
+++ b/GCP_DEPLOYMENT.md
@@ -0,0 +1,28 @@
+# Run Langflow from a New Google Cloud Project
+
+This guide will help you set up a Langflow development VM in a Google Cloud Platform project using Google Cloud Shell.
+
+> **Note**: When Cloud Shell opens, be sure to select **Trust repo**. Some `gcloud` commands might not run in an ephemeral Cloud Shell environment.
+
+
+## Standard VM
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+
+This script sets up a Debian-based VM with the Langflow package, Nginx, and the necessary configurations to run the Langflow Dev environment.
+
+
+## Spot/Preemptible Instance
+
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial.md)
+
+When running as a [spot (preemptible) instance](https://cloud.google.com/compute/docs/instances/preemptible), the code and VM will behave the same way as in a regular instance, executing the startup script to configure the environment, install necessary dependencies, and run the Langflow application. However, **due to the nature of spot instances, the VM may be terminated at any time if Google Cloud needs to reclaim the resources**. This makes spot instances suitable for fault-tolerant, stateless, or interruptible workloads that can handle unexpected terminations and restarts.
+
+## Pricing (approximate)
+> For a more accurate breakdown of costs, please use the [**GCP Pricing Calculator**](https://cloud.google.com/products/calculator)
+
+
+| Component | Regular Cost (Hourly) | Regular Cost (Monthly) | Spot/Preemptible Cost (Hourly) | Spot/Preemptible Cost (Monthly) | Notes |
+| -------------- | --------------------- | ---------------------- | ------------------------------ | ------------------------------- | ----- |
+| 100 GB Disk | - | $10/month | - | $10/month | Disk cost remains the same for both regular and Spot/Preemptible VMs |
+| VM (n1-standard-4) | $0.15/hr | ~$108/month | ~$0.04/hr | ~$29/month | The VM cost can be significantly reduced using a Spot/Preemptible instance |
+| **Total** | **$0.15/hr** | **~$118/month** | **~$0.04/hr** | **~$39/month** | Total costs for running the VM and disk 24/7 for an entire month |
diff --git a/Makefile b/Makefile
index 70407436b..328ac8580 100644
--- a/Makefile
+++ b/Makefile
@@ -42,14 +42,13 @@ build:
dev:
make install_frontend
ifeq ($(build),1)
- @echo 'Running docker compose up with build'
- docker compose up --build
+ @echo 'Running docker compose up with build'
+ docker compose $(if $(debug),-f docker-compose.debug.yml) up --build
else
- @echo 'Running docker compose up without build'
- docker compose up
+ @echo 'Running docker compose up without build'
+ docker compose $(if $(debug),-f docker-compose.debug.yml) up
endif
-
publish:
make build
poetry publish
diff --git a/README.md b/README.md
index 8de32742a..9b0a0cb3a 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,7 @@
~ A User Interface For [LangChain](https://github.com/hwchase17/langchain) ~
+
@@ -19,14 +20,31 @@
LangFlow is a GUI for [LangChain](https://github.com/hwchase17/langchain), designed with [react-flow](https://github.com/wbkd/react-flow) to provide an effortless way to experiment and prototype flows with drag-and-drop components and a chat box.
## 📦 Installation
-
+### Locally
You can install LangFlow from pip:
-`pip install langflow`
+```shell
+pip install langflow
+```
Next, run:
-`langflow`
+```shell
+python -m langflow
+```
+or
+```shell
+langflow
+```
+
+### Deploy Langflow on Google Cloud Platform
+
+Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
+
+Alternatively, click the **"Open in Cloud Shell"** button below to launch Google Cloud Shell, clone the Langflow repository, and start an **interactive tutorial** that will guide you through the process of setting up the necessary resources and deploying Langflow on your GCP project.
+
+[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/genome21/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
+
## 🎨 Creating Flows
@@ -49,7 +67,7 @@ flow("Hey, have you heard of LangFlow?")
## 👋 Contributing
-We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our contributing guidelines and help make LangFlow more accessible.
+We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make LangFlow more accessible.
[](https://star-history.com/#logspace-ai/langflow&Date)
diff --git a/dev.Dockerfile b/dev.Dockerfile
index 0f559a0cf..b38929db2 100644
--- a/dev.Dockerfile
+++ b/dev.Dockerfile
@@ -3,7 +3,7 @@ FROM python:3.10-slim
WORKDIR /app
# Install Poetry
-RUN apt-get update && apt-get install gcc curl -y
+RUN apt-get update && apt-get install gcc g++ curl build-essential postgresql-server-dev-all -y
RUN curl -sSL https://install.python-poetry.org | python3 -
# # Add Poetry to PATH
ENV PATH="${PATH}:/root/.local/bin"
@@ -15,4 +15,4 @@ COPY ./ ./
# Install dependencies
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi
-CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload"]
\ No newline at end of file
+CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "--log-level", "debug"]
\ No newline at end of file
diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml
new file mode 100644
index 000000000..581bdc6da
--- /dev/null
+++ b/docker-compose.debug.yml
@@ -0,0 +1,28 @@
+version: '3.4'
+
+services:
+ backend:
+ volumes:
+ - ./:/app
+ build:
+ context: ./
+ dockerfile: ./dev.Dockerfile
+ command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"]
+ ports:
+ - 7860:7860
+ - 5678:5678
+ restart: on-failure
+
+ frontend:
+ build:
+ context: ./src/frontend
+ dockerfile: ./dev.Dockerfile
+ args:
+ - BACKEND_URL=http://backend:7860
+ ports:
+ - "3000:3000"
+ volumes:
+ - ./src/frontend/public:/home/node/app/public
+ - ./src/frontend/src:/home/node/app/src
+ - ./src/frontend/package.json:/home/node/app/package.json
+ restart: on-failure
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 7f6365839..e6dd6bf7d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -157,6 +157,18 @@ files = [
{file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"},
]
+[[package]]
+name = "argilla"
+version = "0.0.1"
+description = ""
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "argilla-0.0.1-py3-none-any.whl", hash = "sha256:8bdc3c505bcfb47ba4b91f5658034eae53bf7d4f9317980397605c0c55817396"},
+ {file = "argilla-0.0.1.tar.gz", hash = "sha256:5017854754e89f573b31af25b25b803f51cea9ca1fa0bcf00505dee1f45cf7c9"},
+]
+
[[package]]
name = "asttokens"
version = "2.2.1"
@@ -189,22 +201,22 @@ files = [
[[package]]
name = "attrs"
-version = "22.2.0"
+version = "23.1.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"},
- {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"},
+ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
+ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
]
[package.extras]
-cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"]
-dev = ["attrs[docs,tests]"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"]
-tests = ["attrs[tests-no-zope]", "zope.interface"]
-tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"]
+cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
+dev = ["attrs[docs,tests]", "pre-commit"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
+tests = ["attrs[tests-no-zope]", "zope-interface"]
+tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
[[package]]
name = "backcall"
@@ -218,6 +230,18 @@ files = [
{file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
]
+[[package]]
+name = "backoff"
+version = "2.2.1"
+description = "Function decoration for backoff and retry"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
+ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
+]
+
[[package]]
name = "beautifulsoup4"
version = "4.12.0"
@@ -315,7 +339,7 @@ files = [
name = "cffi"
version = "1.15.1"
description = "Foreign Function Interface for Python calling C code."
-category = "dev"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -388,6 +412,18 @@ files = [
[package.dependencies]
pycparser = "*"
+[[package]]
+name = "chardet"
+version = "5.1.0"
+description = "Universal encoding detector for Python 3"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "chardet-5.1.0-py3-none-any.whl", hash = "sha256:362777fb014af596ad31334fde1e8c327dfdb076e1960d1694662d46a6917ab9"},
+ {file = "chardet-5.1.0.tar.gz", hash = "sha256:0d62712b956bc154f85fb0a266e2a3c5913c2967e00348701b32411d6def31e5"},
+]
+
[[package]]
name = "charset-normalizer"
version = "3.1.0"
@@ -473,6 +509,31 @@ files = [
{file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
]
+[[package]]
+name = "chromadb"
+version = "0.3.21"
+description = "Chroma."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "chromadb-0.3.21-py3-none-any.whl", hash = "sha256:b497516ef403d357944742b2363eb729019d68ec0d1a7062a6abe8e127ccf28f"},
+ {file = "chromadb-0.3.21.tar.gz", hash = "sha256:7b3417892666dc90df10eafae719ee189037c448c1c96e6c7964daa870483c3a"},
+]
+
+[package.dependencies]
+clickhouse-connect = ">=0.5.7"
+duckdb = ">=0.7.1"
+fastapi = ">=0.85.1"
+hnswlib = ">=0.7"
+numpy = ">=1.21.6"
+pandas = ">=1.3"
+posthog = ">=2.4.0"
+pydantic = ">=1.9"
+requests = ">=2.28"
+sentence-transformers = ">=2.2.2"
+uvicorn = {version = ">=0.18.3", extras = ["standard"]}
+
[[package]]
name = "click"
version = "8.1.3"
@@ -488,6 +549,126 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
+[[package]]
+name = "clickhouse-connect"
+version = "0.5.20"
+description = "ClickHouse core driver, SqlAlchemy, and Superset libraries"
+category = "main"
+optional = false
+python-versions = "~=3.7"
+files = [
+ {file = "clickhouse-connect-0.5.20.tar.gz", hash = "sha256:5fc9a84849f3c3b6f6928b45a0df17fa63ebcf4e518b3a48ec70720957e18683"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c29cf8b2c90eed6b83366c13ab5ad471ff6ef2e334f35818729330854b9747ac"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c03ded1b006fa2cf8f7d823f0ff9c6d294e442a123c96ca2a9ebc4b293bfb7f"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb0024160412d9c6079fa6982cb29abda4db8412b4f63918de7a1bde1dcb7aa"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:170bd258d21bc828557f8a55f23affe22cc4e671c93f645a6316ef874e359f8e"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc70fee875fdba42c0a6f519fa376659a08253fd36d188b8b304f4ccda572177"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:18837e06846797db475b6aee13f03928fb169f64d0efb268e2bb04e015990b5b"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:76f7a7d2d41377e6f382a7ada825be594c2d316481f3194bfffd025727633258"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3bac453f1199af29ec7292d2fd2a8cb0cc0e6692bec9c9da50ce5aec10ff0339"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-win32.whl", hash = "sha256:14983562d2687b18d03a35f27b4e7f28cf013c280ff4fee726501e03bae7528d"},
+ {file = "clickhouse_connect-0.5.20-cp310-cp310-win_amd64.whl", hash = "sha256:3d618a9c15ee4d2facc7a79e59a646262da64e6ec39d2a1ac6a68167d52266bf"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bdfb74ba2bf5157230f576e16c7d708f20ffa7e4b19c54288d7db2b55ebcd17"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fce7e54ad14b732479c5630948324f7088c3092a74a2442bf015a7cab4bc0a41"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e6a2b6d123f5de362d49f079c509a0a43cfbaecae0130c860706ef738af12b7"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a9391128387013755de8e420bb7e17c6c809f77ca3233fdc966a1df023fa85d"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1df976816913675b46134e8dd9dee2cf315cc4bf42e258211f8036099b8fc280"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f1ddeb651bc75b87ec5fa1fbe17fe3a589d00f42cad76d6e64918067f5025798"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:caf60b4bfb7214d80455137eee45ca0943a370885d65f4298fafde0d431e837a"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c0bdcb72607244dc920f543ee6363a6094e836770aaac07f20556936af85813"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-win32.whl", hash = "sha256:cc3f77df2b1cab2aa99b59f529aead2cc96beac1639ed18f7fd8dba392957623"},
+ {file = "clickhouse_connect-0.5.20-cp311-cp311-win_amd64.whl", hash = "sha256:e44c3b7e40402ce0650f69cbc31f2f503073e2bec9f2b31befbd823150f2431d"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ba78e7d270d78f9559e4a836c6c4f55ab54d9f2b6505c0d05db6260e8e2a4f6a"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e8924824cd19b739cc920d867bf291a31a5da406637e0c575f6eb961cfb0557"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:672c260c471fd18a87a4f5130e6d72590cd4f57289669c58feff5be934810d28"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69887898f8f5ea6e70c30aa51c756f8a752ef0eb1df747d4aec7b7d10de5e103"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c4da55465a52e0e440772e289e6959cc6acbb2efa0561a7ea4f9a7108159958d"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2087b64ab47969e603cd9735e7c0433bdf15c6d83025abd00c50ca9a617ed39b"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28b72cabb1d4fc3f04392ed1f654bd925b6c950305869971186f73b2d13d835a"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-win32.whl", hash = "sha256:a481e13216de227aa624449f5f6ead9e51fe7c8f18bbd783c41e4b396919fa08"},
+ {file = "clickhouse_connect-0.5.20-cp37-cp37m-win_amd64.whl", hash = "sha256:c1dc77bdc15240d6d4d375e098c77403aeabbc6f8b1c2ce524f4389a5d8c6d74"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4fe527b6b4306cad58dde934493d5f018166f78f5914f6abf6ed93750ca7ecbd"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c07b9ca21d302e843aa8c031ef15f85c86280c5730858edfe4eeb952d3991d1d"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71e427b3cd1f611bcb8315ea9bc17f0329329ca21043f1a5ef068e2903457b9b"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9319037b437c8d1297b00d8bc3f92239cc2296db409b5bfc2ff22b05c5f3a26f"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8c3c533fd2baff653dc40e7b88ca86ce9b8d0923c34fb33ce5ce1d1b7370fe6"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c850bc0cf5a00bd144202a6926b646baa60fb4e6c449b62d46c230c548ec760a"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:632922c90cd71fcb8e1b7e6e2a9b4487dee2e67b91846dc1778cfd9d5198d047"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a6c7733b5754ea048bd7928b0cce6625d71c709570c97f1819ba36054850d915"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-win32.whl", hash = "sha256:738b35e061a3c665e9a099a3b5cb50338bed89a6eee3ce29190cd525a1bc1892"},
+ {file = "clickhouse_connect-0.5.20-cp38-cp38-win_amd64.whl", hash = "sha256:58da16eac95126d441f106d27c8e3ae931fcc784f263d7d916b5a8086bdcf757"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b9c57f6958021ec0b22eabaa02e567df3ff5f85fdfd9d052e3aface655bdf3d1"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9c9a2de183a85fc32ef70973cfad5c9af2a8d73733aa30b9523c1400b813c13"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50fd663b132c4edc1fc5dae33c5cbd2538dd2e0c94bd9fff5e98ca3ca12059a2"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a98b165fa2c8420e5219db244f0790b13f401a0932c6a7d5e5c1a959a26b80"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9686bd02a16e3b6cbf976b2476e54bc7caaf1a95fd129fd44b2692d082dfcef6"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d01a51871dde0cd0d24efafd61ab27c57293a0456a26ec7e8a5a585623239ab1"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2c1096ebad10964fcdd646f41228accf182d24b066cefd18d9b33f021e3017cd"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1f0407cc9ea9d2cf51edfe59993c536c256ae54c40c6b36fb7f738edd48f51b5"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-win32.whl", hash = "sha256:184f7c119c9725b25ecaa3011420de8dc06530999653508a983b27c90894146c"},
+ {file = "clickhouse_connect-0.5.20-cp39-cp39-win_amd64.whl", hash = "sha256:f7d2cbde4543cccddef8465afed221f81095eec3d3b763d7570c22ae99819ab4"},
+ {file = "clickhouse_connect-0.5.20-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f83a6e61b9832fc9184bf67e3f7bc041f3b940c066b8162bfadf02aa484b1c4"},
+ {file = "clickhouse_connect-0.5.20-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61b22a7038553813a8f5432cd3b1e57b6d94c629d599d775f57c64c4700a5df"},
+ {file = "clickhouse_connect-0.5.20-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbae752fadbd9fa9390f2246c5ce6e75a91225d03adb3451beb49bd3f1ea48f0"},
+ {file = "clickhouse_connect-0.5.20-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9da5c94be2255d6e07e255899411a5e009723f331d90359e5b21c66e8007630"},
+ {file = "clickhouse_connect-0.5.20-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:205a3dc992548891150d42856e418398d135d9dfa5f30f53bb7c3633d6b449d0"},
+ {file = "clickhouse_connect-0.5.20-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5e0c42adc692f2fb285f5f898d166cf4ed9b5779e5f3effab8f612cd3362f004"},
+ {file = "clickhouse_connect-0.5.20-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e8a2d9dfbfd7c3075f5d1c7011e32b5b62853000d16f93684fa69d8b8979a04"},
+ {file = "clickhouse_connect-0.5.20-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f8bb09db27aba694193073137bd69f8404e53c2ee80f2dbf41c829c081175a"},
+ {file = "clickhouse_connect-0.5.20-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52e07d91e3bcaf3989d698a4d9ad9b36f1dcf357673cc4c44a6663ab78581066"},
+ {file = "clickhouse_connect-0.5.20-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7832b2c4c4c4b316258bd078b54a82c84aeccd62c917eb986059de738b13b56b"},
+ {file = "clickhouse_connect-0.5.20-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e7dad00ce8df847f896c50aa9644c685259a995a15823fec788348e736fb893"},
+ {file = "clickhouse_connect-0.5.20-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34b6c4f16d8b4c5c458504da64e87fb2ec1390640ed7345bf051cfbba18526f4"},
+ {file = "clickhouse_connect-0.5.20-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ce3896158cbac451253bc3632140920a57bb775a82d68370de9ace97ce96a8"},
+ {file = "clickhouse_connect-0.5.20-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f1e552c4efdab1937ff824f062561fe0b6901044ea06b373a35c8a1a679cea"},
+ {file = "clickhouse_connect-0.5.20-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:05d1cfd70fd90b5d7cdb4e93d603d34f74d34327811e8f573fbbd87838cfd4a3"},
+]
+
+[package.dependencies]
+certifi = "*"
+lz4 = "*"
+pytz = "*"
+urllib3 = ">=1.26"
+zstandard = "*"
+
+[package.extras]
+arrow = ["pyarrow"]
+numpy = ["numpy"]
+orjson = ["orjson"]
+pandas = ["pandas"]
+sqlalchemy = ["sqlalchemy (>1.3.21,<1.4)"]
+superset = ["apache-superset (>=1.4.1)"]
+
+[[package]]
+name = "cmake"
+version = "3.26.3"
+description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "cmake-3.26.3-py2.py3-none-macosx_10_10_universal2.macosx_10_10_x86_64.macosx_11_0_arm64.macosx_11_0_universal2.whl", hash = "sha256:9d38ea5b4999f8f042a071bea3e213f085bac26d7ab54cb5a4c6a193c4baf132"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2010_i686.manylinux_2_12_i686.whl", hash = "sha256:6e5fcd1cfaac33d015e2709e0dd1b7ad352a315367012ac359c9adc062cf075b"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:4d3185738a6405aa15801e684f8d589b00570da4cc676cb1b5bbc902e3023e53"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b20f7f7ea316ce7bb158df0e3c3453cfab5048939f1291017d16a8a36ad33ae6"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:46aa385e19c9e4fc95d7d6ce5ee0bbe0d69bdeac4e9bc95c61f78f3973c2f626"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:71e1df5587ad860b9829211380c42fc90ef2413363f12805b1fa2d87769bf876"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:543b6958d1615327f484a07ab041029b1740918a8baa336adc9f5f0cbcd8fbd8"},
+ {file = "cmake-3.26.3-py2.py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1bc7b47456256bdcc41069f5c658f232bd6e15bf4796d115f6ec98800793daff"},
+ {file = "cmake-3.26.3-py2.py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:2ae3db2c2be50fdaf0c9f3a23b2206e9dcd55ca124f16486a841b939f50b595e"},
+ {file = "cmake-3.26.3-py2.py3-none-musllinux_1_1_i686.whl", hash = "sha256:1798547b23b89030518c5668dc55aed0e1d01867cf91d7a94e15d33f62a56fd0"},
+ {file = "cmake-3.26.3-py2.py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:d3017a08e6ba53ec2486d89a7953a81d4c4a068fc9f29d83e209f295dd9c59f3"},
+ {file = "cmake-3.26.3-py2.py3-none-musllinux_1_1_s390x.whl", hash = "sha256:a922a6f6c1580d0db17b0b75f82e619441dd43c7f1d6a35f7d27e709db48bdbb"},
+ {file = "cmake-3.26.3-py2.py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:e0ed796530641c8a21a423f9bb7882117dbbeee11ec78dbc335402a678d937ae"},
+ {file = "cmake-3.26.3-py2.py3-none-win32.whl", hash = "sha256:27a6fa1b97744311a7993d6a1e0ce14bd73696dab9ceb96701f1ec11edbd5053"},
+ {file = "cmake-3.26.3-py2.py3-none-win_amd64.whl", hash = "sha256:cf910bbb488659d300c86b1dac77e44eeb0457bde2cf76a42d7e51f691544b21"},
+ {file = "cmake-3.26.3-py2.py3-none-win_arm64.whl", hash = "sha256:24741a304ada699b339034958777d9a1472ac8ddb9b6194d74f814287ca091ae"},
+ {file = "cmake-3.26.3.tar.gz", hash = "sha256:b54cde1f1c0573321b22382bd2ffaf5d08f65188572d128cd4867fb9669723c5"},
+]
+
+[package.extras]
+test = ["codecov (>=2.0.5)", "coverage (>=4.2)", "flake8 (>=3.0.4)", "path.py (>=11.5.0)", "pytest (>=3.0.3)", "pytest-cov (>=2.4.0)", "pytest-runner (>=2.9)", "pytest-virtualenv (>=1.7.0)", "scikit-build (>=0.10.0)", "setuptools (>=28.0.0)", "virtualenv (>=15.0.3)", "wheel"]
+
[[package]]
name = "colorama"
version = "0.4.6"
@@ -542,29 +723,30 @@ dev = ["flake8", "hypothesis", "ipython", "mypy (>=0.710)", "portray", "pytest (
[[package]]
name = "debugpy"
-version = "1.6.6"
+version = "1.6.7"
description = "An implementation of the Debug Adapter Protocol for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"},
- {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"},
- {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"},
- {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"},
- {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"},
- {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"},
- {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"},
- {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"},
- {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"},
- {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"},
- {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"},
- {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"},
- {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"},
- {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"},
- {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"},
- {file = "debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"},
- {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"},
+ {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"},
+ {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"},
+ {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"},
+ {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"},
+ {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"},
+ {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"},
+ {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"},
+ {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"},
+ {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"},
+ {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"},
+ {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"},
+ {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"},
+ {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"},
+ {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"},
+ {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"},
+ {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"},
+ {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"},
+ {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"},
]
[[package]]
@@ -579,6 +761,117 @@ files = [
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
+[[package]]
+name = "dill"
+version = "0.3.6"
+description = "serialize all of python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
+ {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
+]
+
+[package.extras]
+graph = ["objgraph (>=1.7.2)"]
+
+[[package]]
+name = "docstring-parser"
+version = "0.15"
+description = "Parse Python docstrings in reST, Google and Numpydoc format"
+category = "main"
+optional = false
+python-versions = ">=3.6,<4.0"
+files = [
+ {file = "docstring_parser-0.15-py3-none-any.whl", hash = "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"},
+ {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"},
+]
+
+[[package]]
+name = "duckdb"
+version = "0.7.1"
+description = "DuckDB embedded database"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "duckdb-0.7.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3e0170be6cc315c179169dfa3e06485ef7009ef8ce399cd2908f29105ef2c67b"},
+ {file = "duckdb-0.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6360d41023e726646507d5479ba60960989a09f04527b36abeef3643c61d8c48"},
+ {file = "duckdb-0.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:578c269d7aa27184e8d45421694f89deda3f41fe6bd2a8ce48b262b9fc975326"},
+ {file = "duckdb-0.7.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36aae9a923c9f78da1cf3fcf75873f62d32ea017d4cef7c706d16d3eca527ca2"},
+ {file = "duckdb-0.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:630e0122a02f19bb1fafae00786350b2c31ae8422fce97c827bd3686e7c386af"},
+ {file = "duckdb-0.7.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b9ca2d294725e523ce207bc37f28787478ae6f7a223e2cf3a213a2d498596c3"},
+ {file = "duckdb-0.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bd89f388205b6c99b62650169efe9a02933555ee1d46ddf79fbd0fb9e62652b"},
+ {file = "duckdb-0.7.1-cp310-cp310-win32.whl", hash = "sha256:a9e987565a268fd8da9f65e54621d28f39c13105b8aee34c96643074babe6d9c"},
+ {file = "duckdb-0.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:5d986b5ad1307b069309f9707c0c5051323e29865aefa059eb6c3b22dc9751b6"},
+ {file = "duckdb-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:54606dfd24d7181d3098030ca6858f6be52f3ccbf42fff05f7587f2d9cdf4343"},
+ {file = "duckdb-0.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd9367ae650b6605ffe00412183cf0edb688a5fc9fbb03ed757e8310e7ec3b6c"},
+ {file = "duckdb-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aaf33aeb543c7816bd915cd10141866d54f92f698e1b5712de9d8b7076da19df"},
+ {file = "duckdb-0.7.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e56b0329c38c0356b40449917bab6fce6ac27d356257b9a9da613d2a0f064e0"},
+ {file = "duckdb-0.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:604b8b476d6cc6bf91625d8c2722ef9c50c402b3d64bc518c838d6c279e6d93b"},
+ {file = "duckdb-0.7.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:32a268508c6d7fdc99d5442736051de74c28a5166c4cc3dcbbf35d383299b941"},
+ {file = "duckdb-0.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90794406fa2111414877ee9db154fef940911f3920c312c1cf69947621737c8d"},
+ {file = "duckdb-0.7.1-cp311-cp311-win32.whl", hash = "sha256:bf20c5ee62cbbf10b39ebdfd70d454ce914e70545c7cb6cb78cb5befef96328a"},
+ {file = "duckdb-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:bb2700785cab37cd1e7a76c4547a5ab0f8a7c28ad3f3e4d02a8fae52be223090"},
+ {file = "duckdb-0.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b09741cfa31388b8f9cdf5c5200e0995d55a5b54d2d1a75b54784e2f5c042f7f"},
+ {file = "duckdb-0.7.1-cp36-cp36m-win32.whl", hash = "sha256:766e6390f7ace7f1e322085c2ca5d0ad94767bde78a38d168253d2b0b4d5cd5c"},
+ {file = "duckdb-0.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6a3f3315e2b553db3463f07324f62dfebaf3b97656a87558e59e2f1f816eaf15"},
+ {file = "duckdb-0.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:278edb8c912d836b3b77fd1695887e1dbd736137c3912478af3608c9d7307bb0"},
+ {file = "duckdb-0.7.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e189b558d10b58fe6ed85ce79f728e143eb4115db1e63147a44db613cd4dd0d9"},
+ {file = "duckdb-0.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b91ec3544ee4dc9e6abbdf2669475d5adedaaea51987c67acf161673e6b7443"},
+ {file = "duckdb-0.7.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3fe3f3dbd62b76a773144eef31aa29794578c359da932e77fef04516535318ca"},
+ {file = "duckdb-0.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1e78c7f59325e99f0b3d9fe7c2bad4aaadf42d2c7711925cc26331d7647a91b2"},
+ {file = "duckdb-0.7.1-cp37-cp37m-win32.whl", hash = "sha256:bc2a12d9f4fc8ef2fd1022d610287c9fc9972ea06b7510fc87387f1fa256a390"},
+ {file = "duckdb-0.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:53e3db1bc0f445ee48b23cde47bfba08c7fa5a69976c740ec8cdf89543d2405d"},
+ {file = "duckdb-0.7.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1247cc11bac17f2585d11681329806c86295e32242f84a10a604665e697d5c81"},
+ {file = "duckdb-0.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5feaff16a012075b49dfa09d4cb24455938d6b0e06b08e1404ec00089119dba2"},
+ {file = "duckdb-0.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b411a0c361eab9b26dcd0d0c7a0d1bc0ad6b214068555de7e946fbdd2619961a"},
+ {file = "duckdb-0.7.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c76d8694ecdb579241ecfeaf03c51d640b984dbbe8e1d9f919089ebf3cdea6"},
+ {file = "duckdb-0.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193b896eed44d8751a755ccf002a137630020af0bc3505affa21bf19fdc90df3"},
+ {file = "duckdb-0.7.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7da132ee452c80a3784b8daffd86429fa698e1b0e3ecb84660db96d36c27ad55"},
+ {file = "duckdb-0.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5fd08c97c3e8cb5bec3822cf78b966b489213dcaab24b25c05a99f7caf8db467"},
+ {file = "duckdb-0.7.1-cp38-cp38-win32.whl", hash = "sha256:9cb956f94fa55c4782352dac7cc7572a58312bd7ce97332bb14591d6059f0ea4"},
+ {file = "duckdb-0.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:289a5f65213e66d320ebcd51a94787e7097b9d1c3492d01a121a2c809812bf19"},
+ {file = "duckdb-0.7.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8085ad58c9b5854ee3820804fa1797e6b3134429c1506c3faab3cb96e71b07e9"},
+ {file = "duckdb-0.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b47c19d1f2f662a5951fc6c5f6939d0d3b96689604b529cdcffd9afdcc95bff2"},
+ {file = "duckdb-0.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6a611f598226fd634b7190f509cc6dd668132ffe436b0a6b43847b4b32b99e4a"},
+ {file = "duckdb-0.7.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6730f03b5b78f3943b752c90bdf37b62ae3ac52302282a942cc675825b4a8dc9"},
+ {file = "duckdb-0.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe23e938d29cd8ea6953d77dc828b7f5b95a4dbc7cd7fe5bcc3531da8cec3dba"},
+ {file = "duckdb-0.7.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:feffe503c2e2a99480e1e5e15176f37796b3675e4dadad446fe7c2cc672aed3c"},
+ {file = "duckdb-0.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72fceb06f5bf24ad6bb5974c60d397a7a7e61b3d847507a22276de076f3392e2"},
+ {file = "duckdb-0.7.1-cp39-cp39-win32.whl", hash = "sha256:c4d5217437d20d05fe23317bbc161befa1f9363f3622887cd1d2f4719b407936"},
+ {file = "duckdb-0.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:066885e1883464ce3b7d1fd844f9431227dcffe1ee39bfd2a05cd6d53f304557"},
+ {file = "duckdb-0.7.1.tar.gz", hash = "sha256:a7db6da0366b239ea1e4541fcc19556b286872f5015c9a54c2e347146e25a2ad"},
+]
+
+[[package]]
+name = "et-xmlfile"
+version = "1.1.0"
+description = "An implementation of lxml.xmlfile for the standard library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"},
+ {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.1.1"
+description = "Backport of PEP 654 (exception groups)"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
+ {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
+]
+
+[package.extras]
+test = ["pytest (>=6)"]
+
[[package]]
name = "executing"
version = "1.2.0"
@@ -594,6 +887,21 @@ files = [
[package.extras]
tests = ["asttokens", "littleutils", "pytest", "rich"]
+[[package]]
+name = "fake-useragent"
+version = "1.1.3"
+description = "Up-to-date simple useragent faker with real world database"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "fake-useragent-1.1.3.tar.gz", hash = "sha256:1c06f0aa7d6e4894b919b30b9c7ebd72ff497325191057fbb5df3d5db06b93fc"},
+ {file = "fake_useragent-1.1.3-py3-none-any.whl", hash = "sha256:695d3b1bf7d11d04ab0f971fb73b0ca8de98b78bbadfbc8bacbc9a48423f7531"},
+]
+
+[package.dependencies]
+importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
+
[[package]]
name = "fastapi"
version = "0.92.0"
@@ -616,6 +924,22 @@ dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>
doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.8.0)"]
test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.10.0)", "coverage[toml] (>=6.5.0,<8.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<1.4.43)", "types-orjson (==3.6.2)", "types-ujson (==5.6.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"]
+[[package]]
+name = "filelock"
+version = "3.12.0"
+description = "A platform independent file lock."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"},
+ {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"},
+]
+
+[package.extras]
+docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]
+
[[package]]
name = "frozenlist"
version = "1.3.3"
@@ -923,6 +1247,42 @@ files = [
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
+[[package]]
+name = "hnswlib"
+version = "0.7.0"
+description = "hnswlib"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "hnswlib-0.7.0.tar.gz", hash = "sha256:bc459668e7e44bb7454b256b90c98c5af750653919d9a91698dafcf416cf64c4"},
+]
+
+[package.dependencies]
+numpy = "*"
+
+[[package]]
+name = "httpcore"
+version = "0.16.3"
+description = "A minimal low-level HTTP client."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"},
+ {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"},
+]
+
+[package.dependencies]
+anyio = ">=3.0,<5.0"
+certifi = "*"
+h11 = ">=0.13,<0.15"
+sniffio = ">=1.0.0,<2.0.0"
+
+[package.extras]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
[[package]]
name = "httplib2"
version = "0.22.0"
@@ -938,6 +1298,115 @@ files = [
[package.dependencies]
pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""}
+[[package]]
+name = "httptools"
+version = "0.5.0"
+description = "A collection of framework independent HTTP protocol utils."
+category = "main"
+optional = false
+python-versions = ">=3.5.0"
+files = [
+ {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"},
+ {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"},
+ {file = "httptools-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449"},
+ {file = "httptools-0.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2"},
+ {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde"},
+ {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a"},
+ {file = "httptools-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d"},
+ {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e"},
+ {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49"},
+ {file = "httptools-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9"},
+ {file = "httptools-0.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986"},
+ {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0"},
+ {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f"},
+ {file = "httptools-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1"},
+ {file = "httptools-0.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f"},
+ {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7"},
+ {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60"},
+ {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca"},
+ {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324"},
+ {file = "httptools-0.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86"},
+ {file = "httptools-0.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b"},
+ {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b"},
+ {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281"},
+ {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b"},
+ {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25"},
+ {file = "httptools-0.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5"},
+ {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"},
+ {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c"},
+ {file = "httptools-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc"},
+ {file = "httptools-0.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63"},
+ {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d"},
+ {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901"},
+ {file = "httptools-0.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd"},
+ {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6"},
+ {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42"},
+ {file = "httptools-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887"},
+ {file = "httptools-0.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2"},
+ {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142"},
+ {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372"},
+ {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"},
+ {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"},
+]
+
+[package.extras]
+test = ["Cython (>=0.29.24,<0.30.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.23.3"
+description = "The next generation HTTP client."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"},
+ {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"},
+]
+
+[package.dependencies]
+certifi = "*"
+httpcore = ">=0.15.0,<0.17.0"
+rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "huggingface-hub"
+version = "0.13.4"
+description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
+category = "main"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "huggingface_hub-0.13.4-py3-none-any.whl", hash = "sha256:4d3d40593de6673d624a4baaaf249b9bf5165bfcafd1ad58de361931f0b4fda5"},
+ {file = "huggingface_hub-0.13.4.tar.gz", hash = "sha256:db83d9c2f76aed8cf49893ffadd6be24e82074da2f64b1d36b8ba40eb255e115"},
+]
+
+[package.dependencies]
+filelock = "*"
+packaging = ">=20.9"
+pyyaml = ">=5.1"
+requests = "*"
+tqdm = ">=4.42.1"
+typing-extensions = ">=3.7.4.3"
+
+[package.extras]
+all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (>=23.1,<24.0)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"]
+cli = ["InquirerPy (==0.3.4)"]
+dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "black (>=23.1,<24.0)", "jedi", "mypy (==0.982)", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "ruff (>=0.0.241)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"]
+fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
+quality = ["black (>=23.1,<24.0)", "mypy (==0.982)", "ruff (>=0.0.241)"]
+tensorflow = ["graphviz", "pydot", "tensorflow"]
+testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "jedi", "pytest", "pytest-cov", "pytest-env", "pytest-xdist", "soundfile"]
+torch = ["torch"]
+typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3"]
+
[[package]]
name = "idna"
version = "3.4"
@@ -952,14 +1421,14 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "6.1.0"
+version = "6.6.0"
description = "Read metadata from Python packages"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "importlib_metadata-6.1.0-py3-none-any.whl", hash = "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09"},
- {file = "importlib_metadata-6.1.0.tar.gz", hash = "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20"},
+ {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"},
+ {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"},
]
[package.dependencies]
@@ -970,6 +1439,37 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker
perf = ["ipython"]
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+[[package]]
+name = "importlib-resources"
+version = "5.12.0"
+description = "Read resources from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
+ {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
+]
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+description = "brain-dead simple config-ini parsing"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
[[package]]
name = "ipykernel"
version = "6.22.0"
@@ -1064,6 +1564,36 @@ docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alab
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
+[[package]]
+name = "jinja2"
+version = "3.1.2"
+description = "A very fast and expressive template engine."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
+ {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.0"
+
+[package.extras]
+i18n = ["Babel (>=2.7)"]
+
+[[package]]
+name = "joblib"
+version = "1.2.0"
+description = "Lightweight pipelining with Python functions"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"},
+ {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"},
+]
+
[[package]]
name = "jupyter-client"
version = "8.1.0"
@@ -1086,7 +1616,7 @@ traitlets = ">=5.3"
[package.extras]
docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
-test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
+test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
[[package]]
name = "jupyter-core"
@@ -1135,6 +1665,277 @@ tenacity = ">=8.1.0,<9.0.0"
all = ["aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.4,<0.3.0)", "beautifulsoup4 (>=4,<5)", "boto3 (>=1.26.96,<2.0.0)", "cohere (>=3,<4)", "deeplake (>=3.2.9,<4.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "manifest-ml (>=0.0.1,<0.0.2)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "opensearch-py (>=2.0.0,<3.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "qdrant-client (>=1.0.4,<2.0.0)", "redis (>=4,<5)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<2)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
llms = ["anthropic (>=0.2.4,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "torch (>=1,<2)", "transformers (>=4,<5)"]
+[[package]]
+name = "lit"
+version = "16.0.2"
+description = "A Software Testing Tool"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "lit-16.0.2.tar.gz", hash = "sha256:d743ef55cb58764bba85768c502e2d68d87aeb4303d508a18abaa8a35077ab25"},
+]
+
+[[package]]
+name = "llama-cpp-python"
+version = "0.1.23"
+description = "A Python wrapper for llama.cpp"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "llama_cpp_python-0.1.23.tar.gz", hash = "sha256:323a937e68e04251b5ad1804922e05d15c8b6bfbcf7c3e683a7b39a20e165ebf"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.5.0,<5.0.0"
+
+[[package]]
+name = "lxml"
+version = "4.9.2"
+description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
+files = [
+ {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"},
+ {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"},
+ {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"},
+ {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"},
+ {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"},
+ {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"},
+ {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"},
+ {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"},
+ {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"},
+ {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"},
+ {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"},
+ {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"},
+ {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"},
+ {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"},
+ {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"},
+ {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"},
+ {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"},
+ {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"},
+ {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"},
+ {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"},
+ {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"},
+ {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"},
+ {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"},
+ {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"},
+ {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"},
+ {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"},
+ {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"},
+ {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"},
+ {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"},
+ {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"},
+ {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"},
+ {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"},
+ {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"},
+ {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"},
+ {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"},
+ {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"},
+ {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"},
+ {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"},
+ {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"},
+ {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"},
+ {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"},
+ {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"},
+ {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"},
+ {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"},
+ {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"},
+ {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"},
+ {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"},
+ {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"},
+ {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"},
+ {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"},
+ {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"},
+ {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"},
+ {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"},
+ {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"},
+ {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"},
+ {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"},
+ {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"},
+ {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"},
+ {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"},
+ {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"},
+ {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"},
+ {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"},
+ {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"},
+ {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"},
+ {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"},
+ {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"},
+ {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"},
+ {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"},
+ {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"},
+ {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"},
+ {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"},
+ {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"},
+ {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"},
+ {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"},
+ {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"},
+ {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"},
+ {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"},
+]
+
+[package.extras]
+cssselect = ["cssselect (>=0.7)"]
+html5 = ["html5lib"]
+htmlsoup = ["BeautifulSoup4"]
+source = ["Cython (>=0.29.7)"]
+
+[[package]]
+name = "lz4"
+version = "4.3.2"
+description = "LZ4 Bindings for Python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "lz4-4.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1c4c100d99eed7c08d4e8852dd11e7d1ec47a3340f49e3a96f8dfbba17ffb300"},
+ {file = "lz4-4.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:edd8987d8415b5dad25e797043936d91535017237f72fa456601be1479386c92"},
+ {file = "lz4-4.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7c50542b4ddceb74ab4f8b3435327a0861f06257ca501d59067a6a482535a77"},
+ {file = "lz4-4.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f5614d8229b33d4a97cb527db2a1ac81308c6e796e7bdb5d1309127289f69d5"},
+ {file = "lz4-4.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f00a9ba98f6364cadda366ae6469b7b3568c0cced27e16a47ddf6b774169270"},
+ {file = "lz4-4.3.2-cp310-cp310-win32.whl", hash = "sha256:b10b77dc2e6b1daa2f11e241141ab8285c42b4ed13a8642495620416279cc5b2"},
+ {file = "lz4-4.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:86480f14a188c37cb1416cdabacfb4e42f7a5eab20a737dac9c4b1c227f3b822"},
+ {file = "lz4-4.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7c2df117def1589fba1327dceee51c5c2176a2b5a7040b45e84185ce0c08b6a3"},
+ {file = "lz4-4.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f25eb322eeb24068bb7647cae2b0732b71e5c639e4e4026db57618dcd8279f0"},
+ {file = "lz4-4.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8df16c9a2377bdc01e01e6de5a6e4bbc66ddf007a6b045688e285d7d9d61d1c9"},
+ {file = "lz4-4.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f571eab7fec554d3b1db0d666bdc2ad85c81f4b8cb08906c4c59a8cad75e6e22"},
+ {file = "lz4-4.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7211dc8f636ca625abc3d4fb9ab74e5444b92df4f8d58ec83c8868a2b0ff643d"},
+ {file = "lz4-4.3.2-cp311-cp311-win32.whl", hash = "sha256:867664d9ca9bdfce840ac96d46cd8838c9ae891e859eb98ce82fcdf0e103a947"},
+ {file = "lz4-4.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a6a46889325fd60b8a6b62ffc61588ec500a1883db32cddee9903edfba0b7584"},
+ {file = "lz4-4.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a85b430138882f82f354135b98c320dafb96fc8fe4656573d95ab05de9eb092"},
+ {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d5c93f8badacfa0456b660285e394e65023ef8071142e0dcbd4762166e1be0"},
+ {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b50f096a6a25f3b2edca05aa626ce39979d63c3b160687c8c6d50ac3943d0ba"},
+ {file = "lz4-4.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:200d05777d61ba1ff8d29cb51c534a162ea0b4fe6d3c28be3571a0a48ff36080"},
+ {file = "lz4-4.3.2-cp37-cp37m-win32.whl", hash = "sha256:edc2fb3463d5d9338ccf13eb512aab61937be50aa70734bcf873f2f493801d3b"},
+ {file = "lz4-4.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:83acfacab3a1a7ab9694333bcb7950fbeb0be21660d236fd09c8337a50817897"},
+ {file = "lz4-4.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a9eec24ec7d8c99aab54de91b4a5a149559ed5b3097cf30249b665689b3d402"},
+ {file = "lz4-4.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31d72731c4ac6ebdce57cd9a5cabe0aecba229c4f31ba3e2c64ae52eee3fdb1c"},
+ {file = "lz4-4.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83903fe6db92db0be101acedc677aa41a490b561567fe1b3fe68695b2110326c"},
+ {file = "lz4-4.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:926b26db87ec8822cf1870efc3d04d06062730ec3279bbbd33ba47a6c0a5c673"},
+ {file = "lz4-4.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e05afefc4529e97c08e65ef92432e5f5225c0bb21ad89dee1e06a882f91d7f5e"},
+ {file = "lz4-4.3.2-cp38-cp38-win32.whl", hash = "sha256:ad38dc6a7eea6f6b8b642aaa0683253288b0460b70cab3216838747163fb774d"},
+ {file = "lz4-4.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:7e2dc1bd88b60fa09b9b37f08553f45dc2b770c52a5996ea52b2b40f25445676"},
+ {file = "lz4-4.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:edda4fb109439b7f3f58ed6bede59694bc631c4b69c041112b1b7dc727fffb23"},
+ {file = "lz4-4.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ca83a623c449295bafad745dcd399cea4c55b16b13ed8cfea30963b004016c9"},
+ {file = "lz4-4.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5ea0e788dc7e2311989b78cae7accf75a580827b4d96bbaf06c7e5a03989bd5"},
+ {file = "lz4-4.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a98b61e504fb69f99117b188e60b71e3c94469295571492a6468c1acd63c37ba"},
+ {file = "lz4-4.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4931ab28a0d1c133104613e74eec1b8bb1f52403faabe4f47f93008785c0b929"},
+ {file = "lz4-4.3.2-cp39-cp39-win32.whl", hash = "sha256:ec6755cacf83f0c5588d28abb40a1ac1643f2ff2115481089264c7630236618a"},
+ {file = "lz4-4.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:4caedeb19e3ede6c7a178968b800f910db6503cb4cb1e9cc9221157572139b49"},
+ {file = "lz4-4.3.2.tar.gz", hash = "sha256:e1431d84a9cfb23e6773e72078ce8e65cad6745816d4cbf9ae67da5ea419acda"},
+]
+
+[package.extras]
+docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"]
+flake8 = ["flake8"]
+tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"]
+
+[[package]]
+name = "markdown"
+version = "3.4.3"
+description = "Python implementation of John Gruber's Markdown."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Markdown-3.4.3-py3-none-any.whl", hash = "sha256:065fd4df22da73a625f14890dd77eb8040edcbd68794bcd35943be14490608b2"},
+ {file = "Markdown-3.4.3.tar.gz", hash = "sha256:8bf101198e004dc93e84a12a7395e31aac6a9c9942848ae1d99b9d72cf9b3520"},
+]
+
+[package.dependencies]
+importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
+
+[package.extras]
+testing = ["coverage", "pyyaml"]
+
+[[package]]
+name = "markdown-it-py"
+version = "2.2.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
+ {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"},
+ {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"},
+ {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"},
+ {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"},
+ {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"},
+ {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"},
+ {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
+]
+
[[package]]
name = "marshmallow"
version = "3.19.0"
@@ -1186,6 +1987,66 @@ files = [
[package.dependencies]
traitlets = "*"
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "monotonic"
+version = "1.6"
+description = "An implementation of time.monotonic() for Python 2 & < 3.3"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
+ {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
+]
+
+[[package]]
+name = "mpmath"
+version = "1.3.0"
+description = "Python library for arbitrary-precision floating-point arithmetic"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"},
+ {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"},
+]
+
+[package.extras]
+develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"]
+docs = ["sphinx"]
+gmpy = ["gmpy2 (>=2.1.0a4)"]
+tests = ["pytest (>=4.6)"]
+
+[[package]]
+name = "msg-parser"
+version = "1.2.0"
+description = "This module enables reading, parsing and converting Microsoft Outlook MSG E-Mail files."
+category = "main"
+optional = false
+python-versions = ">=3.4"
+files = [
+ {file = "msg_parser-1.2.0-py2.py3-none-any.whl", hash = "sha256:d47a2f0b2a359cb189fad83cc991b63ea781ecc70d91410324273fbf93e95375"},
+ {file = "msg_parser-1.2.0.tar.gz", hash = "sha256:0de858d4fcebb6c8f6f028da83a17a20fe01cdce67c490779cf43b3b0162aa66"},
+]
+
+[package.dependencies]
+olefile = ">=0.46"
+
+[package.extras]
+rtf = ["compressed-rtf (>=1.0.5)"]
+
[[package]]
name = "multidict"
version = "6.0.4"
@@ -1272,38 +2133,38 @@ files = [
[[package]]
name = "mypy"
-version = "1.1.1"
+version = "1.2.0"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"},
- {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"},
- {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"},
- {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"},
- {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"},
- {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"},
- {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"},
- {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"},
- {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"},
- {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"},
- {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"},
- {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"},
- {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"},
- {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"},
- {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"},
- {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"},
- {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"},
- {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"},
- {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"},
- {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"},
- {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"},
- {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"},
- {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"},
- {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"},
- {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"},
- {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"},
+ {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"},
+ {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"},
+ {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"},
+ {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"},
+ {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"},
+ {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"},
+ {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"},
+ {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"},
+ {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"},
+ {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"},
+ {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"},
+ {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"},
+ {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"},
+ {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"},
+ {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"},
+ {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"},
+ {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"},
+ {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"},
+ {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"},
+ {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"},
+ {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"},
+ {file = "mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"},
+ {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"},
+ {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"},
+ {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"},
+ {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"},
]
[package.dependencies]
@@ -1341,54 +2202,279 @@ files = [
{file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"},
]
+[[package]]
+name = "networkx"
+version = "3.1"
+description = "Python package for creating and manipulating graphs and networks"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"},
+ {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"},
+]
+
+[package.extras]
+default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"]
+developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"]
+doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"]
+extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"]
+test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"]
+
+[[package]]
+name = "nltk"
+version = "3.8.1"
+description = "Natural Language Toolkit"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "nltk-3.8.1-py3-none-any.whl", hash = "sha256:fd5c9109f976fa86bcadba8f91e47f5e9293bd034474752e92a520f81c93dda5"},
+ {file = "nltk-3.8.1.zip", hash = "sha256:1834da3d0682cba4f2cede2f9aad6b0fafb6461ba451db0efb6f9c39798d64d3"},
+]
+
+[package.dependencies]
+click = "*"
+joblib = "*"
+regex = ">=2021.8.3"
+tqdm = "*"
+
+[package.extras]
+all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"]
+corenlp = ["requests"]
+machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"]
+plot = ["matplotlib"]
+tgrep = ["pyparsing"]
+twitter = ["twython"]
+
[[package]]
name = "numpy"
-version = "1.24.2"
+version = "1.24.3"
description = "Fundamental package for array computing in Python"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
- {file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"},
- {file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"},
- {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"},
- {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"},
- {file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"},
- {file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"},
- {file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"},
- {file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"},
- {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"},
- {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"},
- {file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"},
- {file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"},
- {file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"},
- {file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"},
- {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"},
- {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"},
- {file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"},
- {file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"},
- {file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"},
- {file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"},
- {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"},
- {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"},
- {file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"},
- {file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"},
- {file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"},
- {file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"},
- {file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"},
- {file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"},
+ {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"},
+ {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"},
+ {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"},
+ {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"},
+ {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"},
+ {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"},
+ {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"},
+ {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"},
+ {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"},
+ {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"},
+ {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"},
+ {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"},
+ {file = "numpy-1.24.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7776ea65423ca6a15255ba1872d82d207bd1e09f6d0894ee4a64678dd2204078"},
+ {file = "numpy-1.24.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ae8d0be48d1b6ed82588934aaaa179875e7dc4f3d84da18d7eae6eb3f06c242c"},
+ {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecde0f8adef7dfdec993fd54b0f78183051b6580f606111a6d789cd14c61ea0c"},
+ {file = "numpy-1.24.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4749e053a29364d3452c034827102ee100986903263e89884922ef01a0a6fd2f"},
+ {file = "numpy-1.24.3-cp38-cp38-win32.whl", hash = "sha256:d933fabd8f6a319e8530d0de4fcc2e6a61917e0b0c271fded460032db42a0fe4"},
+ {file = "numpy-1.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:56e48aec79ae238f6e4395886b5eaed058abb7231fb3361ddd7bfdf4eed54289"},
+ {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"},
+ {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"},
+ {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"},
+ {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"},
+ {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"},
+ {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"},
+ {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"},
+ {file = "numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"},
+ {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"},
+ {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"},
+]
+
+[[package]]
+name = "nvidia-cublas-cu11"
+version = "11.10.3.66"
+description = "CUBLAS native runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = "sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"},
+ {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cuda-cupti-cu11"
+version = "11.7.101"
+description = "CUDA profiling tools runtime libs."
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl", hash = "sha256:e0cfd9854e1f2edaa36ca20d21cd0bdd5dcfca4e3b9e130a082e05b33b6c5895"},
+ {file = "nvidia_cuda_cupti_cu11-11.7.101-py3-none-win_amd64.whl", hash = "sha256:7cc5b8f91ae5e1389c3c0ad8866b3b016a175e827ea8f162a672990a402ab2b0"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cuda-nvrtc-cu11"
+version = "11.7.99"
+description = "NVRTC native runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"},
+ {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"},
+ {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cuda-runtime-cu11"
+version = "11.7.99"
+description = "CUDA Runtime native Libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"},
+ {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cudnn-cu11"
+version = "8.5.0.96"
+description = "cuDNN runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"},
+ {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cufft-cu11"
+version = "10.9.0.58"
+description = "CUFFT native runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl", hash = "sha256:222f9da70c80384632fd6035e4c3f16762d64ea7a843829cb278f98b3cb7dd81"},
+ {file = "nvidia_cufft_cu11-10.9.0.58-py3-none-win_amd64.whl", hash = "sha256:c4d316f17c745ec9c728e30409612eaf77a8404c3733cdf6c9c1569634d1ca03"},
+]
+
+[[package]]
+name = "nvidia-curand-cu11"
+version = "10.2.10.91"
+description = "CURAND native runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:eecb269c970fa599a2660c9232fa46aaccbf90d9170b96c462e13bcb4d129e2c"},
+ {file = "nvidia_curand_cu11-10.2.10.91-py3-none-win_amd64.whl", hash = "sha256:f742052af0e1e75523bde18895a9ed016ecf1e5aa0ecddfcc3658fd11a1ff417"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cusolver-cu11"
+version = "11.4.0.1"
+description = "CUDA solver native runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:72fa7261d755ed55c0074960df5904b65e2326f7adce364cbe4945063c1be412"},
+ {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:700b781bfefd57d161443aff9ace1878584b93e0b2cfef3d6e9296d96febbf99"},
+ {file = "nvidia_cusolver_cu11-11.4.0.1-py3-none-win_amd64.whl", hash = "sha256:00f70b256add65f8c1eb3b6a65308795a93e7740f6df9e273eccbba770d370c4"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-cusparse-cu11"
+version = "11.7.4.91"
+description = "CUSPARSE native runtime libraries"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:a3389de714db63321aa11fbec3919271f415ef19fda58aed7f2ede488c32733d"},
+ {file = "nvidia_cusparse_cu11-11.7.4.91-py3-none-win_amd64.whl", hash = "sha256:304a01599534f5186a8ed1c3756879282c72c118bc77dd890dc1ff868cad25b9"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "nvidia-nccl-cu11"
+version = "2.14.3"
+description = "NVIDIA Collective Communication Library (NCCL) Runtime"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl", hash = "sha256:5e5534257d1284b8e825bc3a182c6f06acd6eb405e9f89d49340e98cd8f136eb"},
+]
+
+[[package]]
+name = "nvidia-nvtx-cu11"
+version = "11.7.91"
+description = "NVIDIA Tools Extension"
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl", hash = "sha256:b22c64eee426a62fc00952b507d6d29cf62b4c9df7a480fcc417e540e05fd5ac"},
+ {file = "nvidia_nvtx_cu11-11.7.91-py3-none-win_amd64.whl", hash = "sha256:dfd7fcb2a91742513027d63a26b757f38dd8b07fecac282c4d132a9d373ff064"},
+]
+
+[package.dependencies]
+setuptools = "*"
+wheel = "*"
+
+[[package]]
+name = "olefile"
+version = "0.46"
+description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964"},
]
[[package]]
name = "openai"
-version = "0.27.2"
+version = "0.27.4"
description = "Python client library for the OpenAI API"
category = "main"
optional = false
python-versions = ">=3.7.1"
files = [
- {file = "openai-0.27.2-py3-none-any.whl", hash = "sha256:6df674cf257e9e0504f1fd191c333d3f6a2442b13218d0eccf06230eb24d320e"},
- {file = "openai-0.27.2.tar.gz", hash = "sha256:5869fdfa34b0ec66c39afa22f4a0fb83a135dff81f6505f52834c6ab3113f762"},
+ {file = "openai-0.27.4-py3-none-any.whl", hash = "sha256:3b82c867d531e1fd2003d9de2131e1c4bfd4c70b1a3149e0543a555b30807b70"},
+ {file = "openai-0.27.4.tar.gz", hash = "sha256:9f9d27d26e62c6068f516c0729449954b5ef6994be1a6cbfe7dbefbc84423a04"},
]
[package.dependencies]
@@ -1402,18 +2488,82 @@ dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "p
embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"]
wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"]
+[[package]]
+name = "openpyxl"
+version = "3.1.2"
+description = "A Python library to read/write Excel 2010 xlsx/xlsm files"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"},
+ {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"},
+]
+
+[package.dependencies]
+et-xmlfile = "*"
+
[[package]]
name = "packaging"
-version = "23.0"
+version = "23.1"
description = "Core utilities for Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"},
- {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
+ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
+ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
]
+[[package]]
+name = "pandas"
+version = "1.5.3"
+description = "Powerful data structures for data analysis, time series, and statistics"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"},
+ {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"},
+ {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"},
+ {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"},
+ {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"},
+ {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"},
+ {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"},
+ {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"},
+ {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"},
+ {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"},
+ {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"},
+ {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"},
+ {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"},
+ {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"},
+ {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"},
+ {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"},
+ {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"},
+ {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"},
+ {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"},
+ {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"},
+ {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"},
+ {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"},
+ {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"},
+ {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"},
+ {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"},
+ {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"},
+ {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"},
+]
+
+[package.dependencies]
+numpy = [
+ {version = ">=1.20.3", markers = "python_version < \"3.10\""},
+ {version = ">=1.21.0", markers = "python_version >= \"3.10\""},
+ {version = ">=1.23.2", markers = "python_version >= \"3.11\""},
+]
+python-dateutil = ">=2.8.1"
+pytz = ">=2020.1"
+
+[package.extras]
+test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]
+
[[package]]
name = "parso"
version = "0.8.3"
@@ -1469,6 +2619,86 @@ files = [
{file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
]
+[[package]]
+name = "pillow"
+version = "9.5.0"
+description = "Python Imaging Library (Fork)"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"},
+ {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"},
+ {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"},
+ {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"},
+ {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"},
+ {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"},
+ {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"},
+ {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"},
+ {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"},
+ {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"},
+ {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"},
+ {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"},
+ {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"},
+ {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"},
+ {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"},
+ {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"},
+ {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"},
+ {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"},
+ {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"},
+ {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"},
+ {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = "sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"},
+ {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"},
+ {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"},
+ {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"},
+ {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"},
+ {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"},
+ {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"},
+ {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"},
+ {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"},
+ {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"},
+ {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"},
+ {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"},
+ {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"},
+ {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"},
+ {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"},
+ {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"},
+ {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"},
+ {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"},
+ {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"},
+ {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"},
+ {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"},
+ {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"},
+ {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"},
+ {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"},
+ {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"},
+ {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"},
+ {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"},
+ {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"},
+ {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"},
+ {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"},
+ {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"},
+ {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"},
+ {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"},
+ {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"},
+ {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"},
+ {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"},
+ {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"},
+ {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"},
+ {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"},
+ {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"},
+ {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"},
+ {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"},
+ {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"},
+ {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"},
+ {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"},
+ {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"},
+]
+
+[package.extras]
+docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
+tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
+
[[package]]
name = "platformdirs"
version = "3.2.0"
@@ -1481,6 +2711,13 @@ files = [
{file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"},
]
+[package.dependencies]
+backoff = ">=1.10.0"
+monotonic = ">=1.5"
+python-dateutil = ">2.1"
+requests = ">=2.7,<3.0"
+six = ">=1.5"
+
[package.extras]
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
@@ -1502,54 +2739,126 @@ wcwidth = "*"
[[package]]
name = "protobuf"
-version = "4.22.1"
+version = "4.22.3"
description = ""
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "protobuf-4.22.1-cp310-abi3-win32.whl", hash = "sha256:85aa9acc5a777adc0c21b449dafbc40d9a0b6413ff3a4f77ef9df194be7f975b"},
- {file = "protobuf-4.22.1-cp310-abi3-win_amd64.whl", hash = "sha256:8bc971d76c03f1dd49f18115b002254f2ddb2d4b143c583bb860b796bb0d399e"},
- {file = "protobuf-4.22.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:5917412347e1da08ce2939eb5cd60650dfb1a9ab4606a415b9278a1041fb4d19"},
- {file = "protobuf-4.22.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9e12e2810e7d297dbce3c129ae5e912ffd94240b050d33f9ecf023f35563b14f"},
- {file = "protobuf-4.22.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:953fc7904ef46900262a26374b28c2864610b60cdc8b272f864e22143f8373c4"},
- {file = "protobuf-4.22.1-cp37-cp37m-win32.whl", hash = "sha256:6e100f7bc787cd0a0ae58dbf0ab8bbf1ee7953f862b89148b6cf5436d5e9eaa1"},
- {file = "protobuf-4.22.1-cp37-cp37m-win_amd64.whl", hash = "sha256:87a6393fa634f294bf24d1cfe9fdd6bb605cbc247af81b9b10c4c0f12dfce4b3"},
- {file = "protobuf-4.22.1-cp38-cp38-win32.whl", hash = "sha256:e3fb58076bdb550e75db06ace2a8b3879d4c4f7ec9dd86e4254656118f4a78d7"},
- {file = "protobuf-4.22.1-cp38-cp38-win_amd64.whl", hash = "sha256:651113695bc2e5678b799ee5d906b5d3613f4ccfa61b12252cfceb6404558af0"},
- {file = "protobuf-4.22.1-cp39-cp39-win32.whl", hash = "sha256:67b7d19da0fda2733702c2299fd1ef6cb4b3d99f09263eacaf1aa151d9d05f02"},
- {file = "protobuf-4.22.1-cp39-cp39-win_amd64.whl", hash = "sha256:b8700792f88e59ccecfa246fa48f689d6eee6900eddd486cdae908ff706c482b"},
- {file = "protobuf-4.22.1-py3-none-any.whl", hash = "sha256:3e19dcf4adbf608924d3486ece469dd4f4f2cf7d2649900f0efcd1a84e8fd3ba"},
- {file = "protobuf-4.22.1.tar.gz", hash = "sha256:dce7a55d501c31ecf688adb2f6c3f763cf11bc0be815d1946a84d74772ab07a7"},
+ {file = "protobuf-4.22.3-cp310-abi3-win32.whl", hash = "sha256:8b54f56d13ae4a3ec140076c9d937221f887c8f64954673d46f63751209e839a"},
+ {file = "protobuf-4.22.3-cp310-abi3-win_amd64.whl", hash = "sha256:7760730063329d42a9d4c4573b804289b738d4931e363ffbe684716b796bde51"},
+ {file = "protobuf-4.22.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:d14fc1a41d1a1909998e8aff7e80d2a7ae14772c4a70e4bf7db8a36690b54425"},
+ {file = "protobuf-4.22.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:70659847ee57a5262a65954538088a1d72dfc3e9882695cab9f0c54ffe71663b"},
+ {file = "protobuf-4.22.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:13233ee2b9d3bd9a5f216c1fa2c321cd564b93d8f2e4f521a85b585447747997"},
+ {file = "protobuf-4.22.3-cp37-cp37m-win32.whl", hash = "sha256:ecae944c6c2ce50dda6bf76ef5496196aeb1b85acb95df5843cd812615ec4b61"},
+ {file = "protobuf-4.22.3-cp37-cp37m-win_amd64.whl", hash = "sha256:d4b66266965598ff4c291416be429cef7989d8fae88b55b62095a2331511b3fa"},
+ {file = "protobuf-4.22.3-cp38-cp38-win32.whl", hash = "sha256:f08aa300b67f1c012100d8eb62d47129e53d1150f4469fd78a29fa3cb68c66f2"},
+ {file = "protobuf-4.22.3-cp38-cp38-win_amd64.whl", hash = "sha256:f2f4710543abec186aee332d6852ef5ae7ce2e9e807a3da570f36de5a732d88e"},
+ {file = "protobuf-4.22.3-cp39-cp39-win32.whl", hash = "sha256:7cf56e31907c532e460bb62010a513408e6cdf5b03fb2611e4b67ed398ad046d"},
+ {file = "protobuf-4.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:e0e630d8e6a79f48c557cd1835865b593d0547dce221c66ed1b827de59c66c97"},
+ {file = "protobuf-4.22.3-py3-none-any.whl", hash = "sha256:52f0a78141078077cfe15fe333ac3e3a077420b9a3f5d1bf9b5fe9d286b4d881"},
+ {file = "protobuf-4.22.3.tar.gz", hash = "sha256:23452f2fdea754a8251d0fc88c0317735ae47217e0d27bf330a30eec2848811a"},
]
[[package]]
name = "psutil"
-version = "5.9.4"
+version = "5.9.5"
description = "Cross-platform lib for process and system monitoring in Python."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
- {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"},
- {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"},
- {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"},
- {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"},
- {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"},
- {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"},
- {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"},
- {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"},
- {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"},
- {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"},
- {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"},
- {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"},
- {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"},
- {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"},
+ {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"},
+ {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"},
+ {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"},
+ {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"},
+ {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"},
+ {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"},
+ {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"},
+ {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"},
+ {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"},
+ {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"},
+ {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"},
+ {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"},
+ {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"},
+ {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"},
]
[package.extras]
test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+[[package]]
+name = "psycopg2-binary"
+version = "2.9.6"
+description = "psycopg2 - Python-PostgreSQL Database Adapter"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "psycopg2-binary-2.9.6.tar.gz", hash = "sha256:1f64dcfb8f6e0c014c7f55e51c9759f024f70ea572fbdef123f85318c297947c"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d26e0342183c762de3276cca7a530d574d4e25121ca7d6e4a98e4f05cb8e4df7"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c48d8f2db17f27d41fb0e2ecd703ea41984ee19362cbce52c097963b3a1b4365"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffe9dc0a884a8848075e576c1de0290d85a533a9f6e9c4e564f19adf8f6e54a7"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a76e027f87753f9bd1ab5f7c9cb8c7628d1077ef927f5e2446477153a602f2c"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6460c7a99fc939b849431f1e73e013d54aa54293f30f1109019c56a0b2b2ec2f"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae102a98c547ee2288637af07393dd33f440c25e5cd79556b04e3fca13325e5f"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9972aad21f965599ed0106f65334230ce826e5ae69fda7cbd688d24fa922415e"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a40c00dbe17c0af5bdd55aafd6ff6679f94a9be9513a4c7e071baf3d7d22a70"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cacbdc5839bdff804dfebc058fe25684cae322987f7a38b0168bc1b2df703fb1"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f0438fa20fb6c7e202863e0d5ab02c246d35efb1d164e052f2f3bfe2b152bd0"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-win32.whl", hash = "sha256:b6c8288bb8a84b47e07013bb4850f50538aa913d487579e1921724631d02ea1b"},
+ {file = "psycopg2_binary-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:61b047a0537bbc3afae10f134dc6393823882eb263088c271331602b672e52e9"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:964b4dfb7c1c1965ac4c1978b0f755cc4bd698e8aa2b7667c575fb5f04ebe06b"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afe64e9b8ea66866a771996f6ff14447e8082ea26e675a295ad3bdbffdd72afb"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e2ee79e7cf29582ef770de7dab3d286431b01c3bb598f8e05e09601b890081"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa74c903a3c1f0d9b1c7e7b53ed2d929a4910e272add6700c38f365a6002820"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83456c2d4979e08ff56180a76429263ea254c3f6552cd14ada95cff1dec9bb8"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0645376d399bfd64da57148694d78e1f431b1e1ee1054872a5713125681cf1be"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99e34c82309dd78959ba3c1590975b5d3c862d6f279f843d47d26ff89d7d7e1"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ea29fc3ad9d91162c52b578f211ff1c931d8a38e1f58e684c45aa470adf19e2"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4ac30da8b4f57187dbf449294d23b808f8f53cad6b1fc3623fa8a6c11d176dd0"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78e6e2a00c223e164c417628572a90093c031ed724492c763721c2e0bc2a8df"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-win32.whl", hash = "sha256:1876843d8e31c89c399e31b97d4b9725a3575bb9c2af92038464231ec40f9edb"},
+ {file = "psycopg2_binary-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b4b24f75d16a89cc6b4cdff0eb6a910a966ecd476d1e73f7ce5985ff1328e9a6"},
+ {file = "psycopg2_binary-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:498807b927ca2510baea1b05cc91d7da4718a0f53cb766c154c417a39f1820a0"},
+ {file = "psycopg2_binary-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0d236c2825fa656a2d98bbb0e52370a2e852e5a0ec45fc4f402977313329174d"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:34b9ccdf210cbbb1303c7c4db2905fa0319391bd5904d32689e6dd5c963d2ea8"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d2222e61f313c4848ff05353653bf5f5cf6ce34df540e4274516880d9c3763"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30637a20623e2a2eacc420059be11527f4458ef54352d870b8181a4c3020ae6b"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8122cfc7cae0da9a3077216528b8bb3629c43b25053284cc868744bfe71eb141"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38601cbbfe600362c43714482f43b7c110b20cb0f8172422c616b09b85a750c5"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c7e62ab8b332147a7593a385d4f368874d5fe4ad4e341770d4983442d89603e3"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2ab652e729ff4ad76d400df2624d223d6e265ef81bb8aa17fbd63607878ecbee"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c83a74b68270028dc8ee74d38ecfaf9c90eed23c8959fca95bd703d25b82c88e"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d4e6036decf4b72d6425d5b29bbd3e8f0ff1059cda7ac7b96d6ac5ed34ffbacd"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:a8c28fd40a4226b4a84bdf2d2b5b37d2c7bd49486b5adcc200e8c7ec991dfa7e"},
+ {file = "psycopg2_binary-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:51537e3d299be0db9137b321dfb6a5022caaab275775680e0c3d281feefaca6b"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4499e0a83b7b7edcb8dabecbd8501d0d3a5ef66457200f77bde3d210d5debb"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e13a5a2c01151f1208d5207e42f33ba86d561b7a89fca67c700b9486a06d0e2"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e0f754d27fddcfd74006455b6e04e6705d6c31a612ec69ddc040a5468e44b4e"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d57c3fd55d9058645d26ae37d76e61156a27722097229d32a9e73ed54819982a"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71f14375d6f73b62800530b581aed3ada394039877818b2d5f7fc77e3bb6894d"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441cc2f8869a4f0f4bb408475e5ae0ee1f3b55b33f350406150277f7f35384fc"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65bee1e49fa6f9cf327ce0e01c4c10f39165ee76d35c846ade7cb0ec6683e303"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af335bac6b666cc6aea16f11d486c3b794029d9df029967f9938a4bed59b6a19"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cfec476887aa231b8548ece2e06d28edc87c1397ebd83922299af2e051cf2827"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65c07febd1936d63bfde78948b76cd4c2a411572a44ac50719ead41947d0f26b"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-win32.whl", hash = "sha256:4dfb4be774c4436a4526d0c554af0cc2e02082c38303852a36f6456ece7b3503"},
+ {file = "psycopg2_binary-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:02c6e3cf3439e213e4ee930308dc122d6fb4d4bea9aef4a12535fbd605d1a2fe"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9182eb20f41417ea1dd8e8f7888c4d7c6e805f8a7c98c1081778a3da2bee3e4"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a6979cf527e2603d349a91060f428bcb135aea2be3201dff794813256c274f1"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8338a271cb71d8da40b023a35d9c1e919eba6cbd8fa20a54b748a332c355d896"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ed340d2b858d6e6fb5083f87c09996506af483227735de6964a6100b4e6a54"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81e65376e52f03422e1fb475c9514185669943798ed019ac50410fb4c4df232"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb13af3c5dd3a9588000910178de17010ebcccd37b4f9794b00595e3a8ddad3"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c727b597c6444a16e9119386b59388f8a424223302d0c06c676ec8b4bc1f963"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d67fbdaf177da06374473ef6f7ed8cc0a9dc640b01abfe9e8a2ccb1b1402c1f"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0892ef645c2fabb0c75ec32d79f4252542d0caec1d5d949630e7d242ca4681a3"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02c0f3757a4300cf379eb49f543fb7ac527fb00144d39246ee40e1df684ab514"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-win32.whl", hash = "sha256:c3dba7dab16709a33a847e5cd756767271697041fbe3fe97c215b1fc1f5c9848"},
+ {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"},
+]
+
[[package]]
name = "ptyprocess"
version = "0.7.0"
@@ -1578,37 +2887,75 @@ files = [
tests = ["pytest"]
[[package]]
-name = "pyasn1"
-version = "0.4.8"
-description = "ASN.1 types and codecs"
+name = "pyarrow"
+version = "11.0.0"
+description = "Python library for Apache Arrow"
category = "main"
optional = false
-python-versions = "*"
+python-versions = ">=3.7"
files = [
- {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
- {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
+ {file = "pyarrow-11.0.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:40bb42afa1053c35c749befbe72f6429b7b5f45710e85059cdd534553ebcf4f2"},
+ {file = "pyarrow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7c28b5f248e08dea3b3e0c828b91945f431f4202f1a9fe84d1012a761324e1ba"},
+ {file = "pyarrow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a37bc81f6c9435da3c9c1e767324ac3064ffbe110c4e460660c43e144be4ed85"},
+ {file = "pyarrow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7c53def8dbbc810282ad308cc46a523ec81e653e60a91c609c2233ae407689"},
+ {file = "pyarrow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:25aa11c443b934078bfd60ed63e4e2d42461682b5ac10f67275ea21e60e6042c"},
+ {file = "pyarrow-11.0.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:e217d001e6389b20a6759392a5ec49d670757af80101ee6b5f2c8ff0172e02ca"},
+ {file = "pyarrow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad42bb24fc44c48f74f0d8c72a9af16ba9a01a2ccda5739a517aa860fa7e3d56"},
+ {file = "pyarrow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d942c690ff24a08b07cb3df818f542a90e4d359381fbff71b8f2aea5bf58841"},
+ {file = "pyarrow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f010ce497ca1b0f17a8243df3048055c0d18dcadbcc70895d5baf8921f753de5"},
+ {file = "pyarrow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2f51dc7ca940fdf17893227edb46b6784d37522ce08d21afc56466898cb213b2"},
+ {file = "pyarrow-11.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1cbcfcbb0e74b4d94f0b7dde447b835a01bc1d16510edb8bb7d6224b9bf5bafc"},
+ {file = "pyarrow-11.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaee8f79d2a120bf3e032d6d64ad20b3af6f56241b0ffc38d201aebfee879d00"},
+ {file = "pyarrow-11.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:410624da0708c37e6a27eba321a72f29d277091c8f8d23f72c92bada4092eb5e"},
+ {file = "pyarrow-11.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2d53ba72917fdb71e3584ffc23ee4fcc487218f8ff29dd6df3a34c5c48fe8c06"},
+ {file = "pyarrow-11.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:f12932e5a6feb5c58192209af1d2607d488cb1d404fbc038ac12ada60327fa34"},
+ {file = "pyarrow-11.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:41a1451dd895c0b2964b83d91019e46f15b5564c7ecd5dcb812dadd3f05acc97"},
+ {file = "pyarrow-11.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc2344be80e5dce4e1b80b7c650d2fc2061b9eb339045035a1baa34d5b8f1c"},
+ {file = "pyarrow-11.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f40be0d7381112a398b93c45a7e69f60261e7b0269cc324e9f739ce272f4f70"},
+ {file = "pyarrow-11.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:362a7c881b32dc6b0eccf83411a97acba2774c10edcec715ccaab5ebf3bb0835"},
+ {file = "pyarrow-11.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:ccbf29a0dadfcdd97632b4f7cca20a966bb552853ba254e874c66934931b9841"},
+ {file = "pyarrow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3e99be85973592051e46412accea31828da324531a060bd4585046a74ba45854"},
+ {file = "pyarrow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69309be84dcc36422574d19c7d3a30a7ea43804f12552356d1ab2a82a713c418"},
+ {file = "pyarrow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da93340fbf6f4e2a62815064383605b7ffa3e9eeb320ec839995b1660d69f89b"},
+ {file = "pyarrow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:caad867121f182d0d3e1a0d36f197df604655d0b466f1bc9bafa903aa95083e4"},
+ {file = "pyarrow-11.0.0.tar.gz", hash = "sha256:5461c57dbdb211a632a48facb9b39bbeb8a7905ec95d768078525283caef5f6d"},
+]
+
+[package.dependencies]
+numpy = ">=1.16.6"
+
+[[package]]
+name = "pyasn1"
+version = "0.5.0"
+description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"},
+ {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"},
]
[[package]]
name = "pyasn1-modules"
-version = "0.2.8"
-description = "A collection of ASN.1-based protocols modules."
+version = "0.3.0"
+description = "A collection of ASN.1-based protocols modules"
category = "main"
optional = false
-python-versions = "*"
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
- {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"},
- {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"},
+ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"},
+ {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"},
]
[package.dependencies]
-pyasn1 = ">=0.4.6,<0.5.0"
+pyasn1 = ">=0.4.6,<0.6.0"
[[package]]
name = "pycparser"
version = "2.21"
description = "C parser in Python"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
@@ -1671,19 +3018,31 @@ email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pygments"
-version = "2.14.0"
+version = "2.15.1"
description = "Pygments is a syntax highlighting package written in Python."
-category = "dev"
+category = "main"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"},
- {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"},
+ {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
+ {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
]
[package.extras]
plugins = ["importlib-metadata"]
+[[package]]
+name = "pypandoc"
+version = "1.11"
+description = "Thin wrapper for pandoc."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pypandoc-1.11-py3-none-any.whl", hash = "sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007"},
+ {file = "pypandoc-1.11.tar.gz", hash = "sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5"},
+]
+
[[package]]
name = "pyparsing"
version = "3.0.9"
@@ -1699,11 +3058,70 @@ files = [
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]
+[[package]]
+name = "pypdf"
+version = "3.8.1"
+description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "pypdf-3.8.1-py3-none-any.whl", hash = "sha256:0c34620e4bbceaf9632b6b7a8ec6d4a4d5b0cdee6e39bdb86dc91a8c44cb0f19"},
+ {file = "pypdf-3.8.1.tar.gz", hash = "sha256:761ad6dc33abb78d358b4ae42206c5f185798f8b537be9b8fdecd9ee834a894d"},
+]
+
+[package.dependencies]
+typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+crypto = ["PyCryptodome"]
+dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "wheel"]
+docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"]
+full = ["Pillow", "PyCryptodome"]
+image = ["Pillow"]
+
+[[package]]
+name = "pysrt"
+version = "1.1.2"
+description = "SubRip (.srt) subtitle parser and writer"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pysrt-1.1.2.tar.gz", hash = "sha256:b4f844ba33e4e7743e9db746492f3a193dc0bc112b153914698e7c1cdeb9b0b9"},
+]
+
+[package.dependencies]
+chardet = "*"
+
+[[package]]
+name = "pytest"
+version = "7.3.1"
+description = "pytest: simple powerful testing with Python"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"},
+ {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "sys_platform == \"win32\""}
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
+iniconfig = "*"
+packaging = "*"
+pluggy = ">=0.12,<2.0"
+tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
-category = "dev"
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
@@ -1714,6 +3132,75 @@ files = [
[package.dependencies]
six = ">=1.5"
+[[package]]
+name = "python-docx"
+version = "0.8.11"
+description = "Create and update Microsoft Word .docx files."
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "python-docx-0.8.11.tar.gz", hash = "sha256:1105d233a0956dd8dd1e710d20b159e2d72ac3c301041b95f4d4ceb3e0ebebc4"},
+]
+
+[package.dependencies]
+lxml = ">=2.3.2"
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.0"
+description = "Read key-value pairs from a .env file and set them as environment variables"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
+ {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
+]
+
+[package.extras]
+cli = ["click (>=5.0)"]
+
+[[package]]
+name = "python-magic"
+version = "0.4.27"
+description = "File type identification using libmagic"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"},
+ {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"},
+]
+
+[[package]]
+name = "python-pptx"
+version = "0.6.21"
+description = "Generate and manipulate Open XML PowerPoint (.pptx) files"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"},
+]
+
+[package.dependencies]
+lxml = ">=3.1.0"
+Pillow = ">=3.3.2"
+XlsxWriter = ">=0.5.7"
+
+[[package]]
+name = "pytz"
+version = "2023.3"
+description = "World timezone definitions, modern and historical"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
+ {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
+]
+
[[package]]
name = "pywin32"
version = "306"
@@ -1878,6 +3365,76 @@ files = [
[package.dependencies]
cffi = {version = "*", markers = "implementation_name == \"pypy\""}
+[[package]]
+name = "regex"
+version = "2023.3.23"
+description = "Alternative regular expression module, to replace re."
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "regex-2023.3.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8"},
+ {file = "regex-2023.3.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134"},
+ {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021"},
+ {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15"},
+ {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879"},
+ {file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea"},
+ {file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715"},
+ {file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699"},
+ {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d"},
+ {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd"},
+ {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d"},
+ {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910"},
+ {file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0"},
+ {file = "regex-2023.3.23-cp310-cp310-win32.whl", hash = "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d"},
+ {file = "regex-2023.3.23-cp310-cp310-win_amd64.whl", hash = "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98"},
+ {file = "regex-2023.3.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b"},
+ {file = "regex-2023.3.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797"},
+ {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb"},
+ {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1"},
+ {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f"},
+ {file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09"},
+ {file = "regex-2023.3.23-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd"},
+ {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d"},
+ {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8"},
+ {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6"},
+ {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b"},
+ {file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d"},
+ {file = "regex-2023.3.23-cp311-cp311-win32.whl", hash = "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858"},
+ {file = "regex-2023.3.23-cp311-cp311-win_amd64.whl", hash = "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53"},
+ {file = "regex-2023.3.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253"},
+ {file = "regex-2023.3.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27"},
+ {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790"},
+ {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3"},
+ {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31"},
+ {file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a"},
+ {file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a"},
+ {file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325"},
+ {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8"},
+ {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0"},
+ {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7"},
+ {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467"},
+ {file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96"},
+ {file = "regex-2023.3.23-cp38-cp38-win32.whl", hash = "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9"},
+ {file = "regex-2023.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c"},
+ {file = "regex-2023.3.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004"},
+ {file = "regex-2023.3.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072"},
+ {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3"},
+ {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7"},
+ {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c"},
+ {file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364"},
+ {file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e"},
+ {file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618"},
+ {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762"},
+ {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f"},
+ {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb"},
+ {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77"},
+ {file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691"},
+ {file = "regex-2023.3.23-cp39-cp39-win32.whl", hash = "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef"},
+ {file = "regex-2023.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858"},
+ {file = "regex-2023.3.23.tar.gz", hash = "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7"},
+]
+
[[package]]
name = "requests"
version = "2.28.2"
@@ -1900,6 +3457,43 @@ urllib3 = ">=1.21.1,<1.27"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+category = "dev"
+optional = false
+python-versions = "*"
+files = [
+ {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
+ {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
+]
+
+[package.dependencies]
+idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "rich"
+version = "13.3.4"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+category = "main"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.3.4-py3-none-any.whl", hash = "sha256:22b74cae0278fd5086ff44144d3813be1cedc9115bdfabbfefd86400cb88b20a"},
+ {file = "rich-13.3.4.tar.gz", hash = "sha256:b5d573e13605423ec80bdd0cd5f8541f7844a0e71a13f74cf454ccb2f490708b"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0,<3.0.0"
+pygments = ">=2.13.0,<3.0.0"
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
[[package]]
name = "rsa"
version = "4.9"
@@ -1942,6 +3536,166 @@ files = [
{file = "ruff-0.0.254.tar.gz", hash = "sha256:0eb66c9520151d3bd950ea43b3a088618a8e4e10a5014a72687881e6f3606312"},
]
+[[package]]
+name = "scikit-learn"
+version = "1.2.2"
+description = "A set of python modules for machine learning and data mining"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "scikit-learn-1.2.2.tar.gz", hash = "sha256:8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7"},
+ {file = "scikit_learn-1.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99cc01184e347de485bf253d19fcb3b1a3fb0ee4cea5ee3c43ec0cc429b6d29f"},
+ {file = "scikit_learn-1.2.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e6e574db9914afcb4e11ade84fab084536a895ca60aadea3041e85b8ac963edb"},
+ {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fe83b676f407f00afa388dd1fdd49e5c6612e551ed84f3b1b182858f09e987d"},
+ {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2642baa0ad1e8f8188917423dd73994bf25429f8893ddbe115be3ca3183584"},
+ {file = "scikit_learn-1.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ad66c3848c0a1ec13464b2a95d0a484fd5b02ce74268eaa7e0c697b904f31d6c"},
+ {file = "scikit_learn-1.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfeaf8be72117eb61a164ea6fc8afb6dfe08c6f90365bde2dc16456e4bc8e45f"},
+ {file = "scikit_learn-1.2.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:fe0aa1a7029ed3e1dcbf4a5bc675aa3b1bc468d9012ecf6c6f081251ca47f590"},
+ {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:065e9673e24e0dc5113e2dd2b4ca30c9d8aa2fa90f4c0597241c93b63130d233"},
+ {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf036ea7ef66115e0d49655f16febfa547886deba20149555a41d28f56fd6d3c"},
+ {file = "scikit_learn-1.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:8b0670d4224a3c2d596fd572fb4fa673b2a0ccfb07152688ebd2ea0b8c61025c"},
+ {file = "scikit_learn-1.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9c710ff9f9936ba8a3b74a455ccf0dcf59b230caa1e9ba0223773c490cab1e51"},
+ {file = "scikit_learn-1.2.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:2dd3ffd3950e3d6c0c0ef9033a9b9b32d910c61bd06cb8206303fb4514b88a49"},
+ {file = "scikit_learn-1.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44b47a305190c28dd8dd73fc9445f802b6ea716669cfc22ab1eb97b335d238b1"},
+ {file = "scikit_learn-1.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:953236889928d104c2ef14027539f5f2609a47ebf716b8cbe4437e85dce42744"},
+ {file = "scikit_learn-1.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:7f69313884e8eb311460cc2f28676d5e400bd929841a2c8eb8742ae78ebf7c20"},
+ {file = "scikit_learn-1.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8156db41e1c39c69aa2d8599ab7577af53e9e5e7a57b0504e116cc73c39138dd"},
+ {file = "scikit_learn-1.2.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fe175ee1dab589d2e1033657c5b6bec92a8a3b69103e3dd361b58014729975c3"},
+ {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d5312d9674bed14f73773d2acf15a3272639b981e60b72c9b190a0cffed5bad"},
+ {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea061bf0283bf9a9f36ea3c5d3231ba2176221bbd430abd2603b1c3b2ed85c89"},
+ {file = "scikit_learn-1.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6477eed40dbce190f9f9e9d0d37e020815825b300121307942ec2110302b66a3"},
+]
+
+[package.dependencies]
+joblib = ">=1.1.1"
+numpy = ">=1.17.3"
+scipy = ">=1.3.2"
+threadpoolctl = ">=2.0.0"
+
+[package.extras]
+benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"]
+docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"]
+examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.10.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"]
+tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy (>=0.961)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=5.3.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.16.2)"]
+
+[[package]]
+name = "scipy"
+version = "1.9.3"
+description = "Fundamental algorithms for scientific computing in Python"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"},
+ {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"},
+ {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"},
+ {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"},
+ {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"},
+ {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"},
+ {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"},
+ {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"},
+ {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"},
+ {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"},
+ {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"},
+ {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"},
+ {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"},
+ {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"},
+ {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"},
+ {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"},
+ {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"},
+ {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"},
+ {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"},
+ {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"},
+ {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"},
+]
+
+[package.dependencies]
+numpy = ">=1.18.5,<1.26.0"
+
+[package.extras]
+dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"]
+doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"]
+test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+
+[[package]]
+name = "sentence-transformers"
+version = "2.2.2"
+description = "Multilingual text embeddings"
+category = "main"
+optional = false
+python-versions = ">=3.6.0"
+files = [
+ {file = "sentence-transformers-2.2.2.tar.gz", hash = "sha256:dbc60163b27de21076c9a30d24b5b7b6fa05141d68cf2553fa9a77bf79a29136"},
+]
+
+[package.dependencies]
+huggingface-hub = ">=0.4.0"
+nltk = "*"
+numpy = "*"
+scikit-learn = "*"
+scipy = "*"
+sentencepiece = "*"
+torch = ">=1.6.0"
+torchvision = "*"
+tqdm = "*"
+transformers = ">=4.6.0,<5.0.0"
+
+[[package]]
+name = "sentencepiece"
+version = "0.1.98"
+description = "SentencePiece python wrapper"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "sentencepiece-0.1.98-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1daf0a79cd953e4830746c41e92b98a2f2e9e5ec0e90a9447aa10350e11bd027"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:57911445fc91c80d59552adf8a749af9205458920a7328f3bd7d51308658bcd9"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f9239785849ed1f55a825bcc282bef1a6073f7431cc535bdc658a94873652ea"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:467740ef8af170e5b6cfe22c272114ed930c899c297619ac7a2ac463a13bdbac"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b6f0b9ffb601e2699e265f3f20c353ec9a661e4b5f0cff08ad6c9909c0ae43e"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6150ba525fac4fda76f5c4777ae300597e70cef739ed2a47cea02ff81a88873f"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-win32.whl", hash = "sha256:58ca96d73ea0e5575e3f6a9524449c673d62e6ecee3b2ddd5bfb4f49cb315c0a"},
+ {file = "sentencepiece-0.1.98-cp310-cp310-win_amd64.whl", hash = "sha256:8abe5c4c034e497e69f485dcd2c0e6bc87bf0498ad5aef5f539a7d0f9eae6275"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b6ed62f89c0bd25cec39a7075f6b9354fe4c240ed964e63009d77efcf29c34e9"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c2d9a74986d3716dc6961e9dbae7a3b25bb1260118f098545fd963ae23252c1"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f7dc2fc175623529fb60a2799748f8877cd48c4541b32cd97b8523465e88b69"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64e32c55d04a2e21f0c2fda1b7a3dd108133ebfb8616b52896916bb30e4352ed"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:443f32e94b18571231b02a51be173686114b5556b5edfcbf347fb63e7bd5ddc6"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:558a373a8660bdff299d6c133c2a4f4fb0875e9e6fafe225b8080ecce8a405f9"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-win32.whl", hash = "sha256:fcf100268cefe1774794b18cbaf3065e2bf988f168a387973eb1260d51198795"},
+ {file = "sentencepiece-0.1.98-cp311-cp311-win_amd64.whl", hash = "sha256:05b4eecbece0606883cd81ed86bb3c619680bb570b997b236533ec854d64a575"},
+ {file = "sentencepiece-0.1.98-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:35af00f5103a4779694fedea41b6e24947a9ed81166efe63864ab1e781d70a66"},
+ {file = "sentencepiece-0.1.98-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2766cd708e9fc2b5b9784a990a8b303b9e0b9a69fa482616fe86fa538daa1756"},
+ {file = "sentencepiece-0.1.98-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2531c0e9cc8cd404fabd856d80d695b373371c00f1fce29c06f41f3f7429d87"},
+ {file = "sentencepiece-0.1.98-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffcc78e80c55eab67ee3439ade493607a4e37e1f0b82b168ead3debf9eaeaabe"},
+ {file = "sentencepiece-0.1.98-cp36-cp36m-win32.whl", hash = "sha256:ef384b31ec7a06a9a6aba42e68435f3f3b38809aa65559ede3658cdd446a562c"},
+ {file = "sentencepiece-0.1.98-cp36-cp36m-win_amd64.whl", hash = "sha256:e7a828f1fe2e51d2d9e5e9b3283d4006f1891efb02a3d9303ed39ddafdd9c864"},
+ {file = "sentencepiece-0.1.98-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8663be00a68098f85d6cda1f7041a27de05c320e433fa730ecb1156a8304f21c"},
+ {file = "sentencepiece-0.1.98-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf05611089a075b78d353720ccc3a09a78e0846332cff0cc78fda8b2383626a"},
+ {file = "sentencepiece-0.1.98-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11f410cc7eeb3e1cfa8d92d128b568e5dc7829b7904b164499fd0209316ec2fa"},
+ {file = "sentencepiece-0.1.98-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5ea8fb2c68073fe25a08a178eed269ed382fba074ff2ba4de72f0f56d86630e"},
+ {file = "sentencepiece-0.1.98-cp37-cp37m-win32.whl", hash = "sha256:fa13a125417d28e84fbdebcaf6aa115e4177d3e93aa66b857a42e7179f515b88"},
+ {file = "sentencepiece-0.1.98-cp37-cp37m-win_amd64.whl", hash = "sha256:e54aa70b574eee895d184072d84e62824f404821e551a82c619c5d4320a93834"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:515a971c2a157647ca0e60ce3c435f4b43cd5c9f5862159cfefa0b5b4d46d3c3"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c23c3a562221bc40eaae42428fcd8e607e0f084ea8aa968ba3f1a7d0ea975807"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c067ba22be8edc699f6365e01ec15046bf3563dbabfdc052ecc88e581b675cba"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12c913493d6ebac86ee7ae109e368522a5a365a7b150d4d8cf845599262d2b21"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:720f827dc69ee24951ea4f51b9fb69cc56890a7190fc52c2c0da2545caab1760"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:918b4caf18b2f73c302c4e197d1c2dafba39eb143d16b4590930b45f15042fdd"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-win32.whl", hash = "sha256:2d50edfc4649a1566b64f1a8402cd607e1893bf8e368732337d83f00df62d3fa"},
+ {file = "sentencepiece-0.1.98-cp38-cp38-win_amd64.whl", hash = "sha256:7425b727c3d6b3b7bad0005a3be316078b254180b712d73955ff08cae3f6a385"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:00b2becbd7b98905a6de9695cf8682abe0d510ab0198e23c7d86fb2b793b6ae0"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f71c4bdedb797052fb2ccad0871c2409bf6f812cb6b651917c55f9e8eced07f"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7287461d2346f530928ab187f0834cb15ddfbc4553592cacdcb6470364739ec6"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:472ad943eaffcb6871ece56c7850388e7b8722f520ba73c93e7a6ef965453221"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7e23aaf9d5afd91ca13550968bd17f0c17b0966823188ad2a50c51544cf8ed"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b0ce9efc790c209cce2463058855dceb21438213d2ff13cb5a565d52a7efe25"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-win32.whl", hash = "sha256:8b50cbe8e46204eff7aa5a663af5652c45e7807aa560d08e5f5b10c60e795a49"},
+ {file = "sentencepiece-0.1.98-cp39-cp39-win_amd64.whl", hash = "sha256:14841bd2a3d77c4dbba58f02488c374866551e428d755e8d473d82325a0a94f3"},
+ {file = "sentencepiece-0.1.98.tar.gz", hash = "sha256:947cf0a4b8a480510d560a922f8256f34e93984a86cf870be4d05731f59fb28d"},
+]
+
[[package]]
name = "setuptools"
version = "67.6.1"
@@ -1985,14 +3739,14 @@ files = [
[[package]]
name = "soupsieve"
-version = "2.4"
+version = "2.4.1"
description = "A modern CSS selector implementation for Beautiful Soup."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"},
- {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"},
+ {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"},
+ {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"},
]
[[package]]
@@ -2109,6 +3863,21 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
+[[package]]
+name = "sympy"
+version = "1.11.1"
+description = "Computer algebra system (CAS) in Python"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "sympy-1.11.1-py3-none-any.whl", hash = "sha256:938f984ee2b1e8eae8a07b884c8b7a1146010040fccddc6539c54f401c8f6fcf"},
+ {file = "sympy-1.11.1.tar.gz", hash = "sha256:e32380dce63cb7c0108ed525570092fd45168bdae2faa17e528221ef72e88658"},
+]
+
+[package.dependencies]
+mpmath = ">=0.19"
+
[[package]]
name = "tenacity"
version = "8.2.2"
@@ -2124,6 +3893,73 @@ files = [
[package.extras]
doc = ["reno", "sphinx", "tornado (>=4.5)"]
+[[package]]
+name = "threadpoolctl"
+version = "3.1.0"
+description = "threadpoolctl"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"},
+ {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"},
+]
+
+[[package]]
+name = "tokenizers"
+version = "0.13.3"
+description = "Fast and Customizable Tokenizers"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "tokenizers-0.13.3-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:f3835c5be51de8c0a092058a4d4380cb9244fb34681fd0a295fbf0a52a5fdf33"},
+ {file = "tokenizers-0.13.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4ef4c3e821730f2692489e926b184321e887f34fb8a6b80b8096b966ba663d07"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5fd1a6a25353e9aa762e2aae5a1e63883cad9f4e997c447ec39d071020459bc"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee0b1b311d65beab83d7a41c56a1e46ab732a9eed4460648e8eb0bd69fc2d059"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ef4215284df1277dadbcc5e17d4882bda19f770d02348e73523f7e7d8b8d396"},
+ {file = "tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4d53976079cff8a033f778fb9adca2d9d69d009c02fa2d71a878b5f3963ed30"},
+ {file = "tokenizers-0.13.3-cp310-cp310-win32.whl", hash = "sha256:1f0e3b4c2ea2cd13238ce43548959c118069db7579e5d40ec270ad77da5833ce"},
+ {file = "tokenizers-0.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:89649c00d0d7211e8186f7a75dfa1db6996f65edce4b84821817eadcc2d3c79e"},
+ {file = "tokenizers-0.13.3-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:56b726e0d2bbc9243872b0144515ba684af5b8d8cd112fb83ee1365e26ec74c8"},
+ {file = "tokenizers-0.13.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:cc5c022ce692e1f499d745af293ab9ee6f5d92538ed2faf73f9708c89ee59ce6"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55c981ac44ba87c93e847c333e58c12abcbb377a0c2f2ef96e1a266e4184ff2"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f247eae99800ef821a91f47c5280e9e9afaeed9980fc444208d5aa6ba69ff148"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b3e3215d048e94f40f1c95802e45dcc37c5b05eb46280fc2ccc8cd351bff839"},
+ {file = "tokenizers-0.13.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ba2b0bf01777c9b9bc94b53764d6684554ce98551fec496f71bc5be3a03e98b"},
+ {file = "tokenizers-0.13.3-cp311-cp311-win32.whl", hash = "sha256:cc78d77f597d1c458bf0ea7c2a64b6aa06941c7a99cb135b5969b0278824d808"},
+ {file = "tokenizers-0.13.3-cp311-cp311-win_amd64.whl", hash = "sha256:ecf182bf59bd541a8876deccf0360f5ae60496fd50b58510048020751cf1724c"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:0527dc5436a1f6bf2c0327da3145687d3bcfbeab91fed8458920093de3901b44"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cbb2c307627dc99b44b22ef05ff4473aa7c7cc1fec8f0a8b37d8a64b1a16d2"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4560dbdeaae5b7ee0d4e493027e3de6d53c991b5002d7ff95083c99e11dd5ac0"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64064bd0322405c9374305ab9b4c07152a1474370327499911937fd4a76d004b"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8c6e2ab0f2e3d939ca66aa1d596602105fe33b505cd2854a4c1717f704c51de"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-win32.whl", hash = "sha256:6cc29d410768f960db8677221e497226e545eaaea01aa3613fa0fdf2cc96cff4"},
+ {file = "tokenizers-0.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fc2a7fdf864554a0dacf09d32e17c0caa9afe72baf9dd7ddedc61973bae352d8"},
+ {file = "tokenizers-0.13.3-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:8791dedba834c1fc55e5f1521be325ea3dafb381964be20684b92fdac95d79b7"},
+ {file = "tokenizers-0.13.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:d607a6a13718aeb20507bdf2b96162ead5145bbbfa26788d6b833f98b31b26e1"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3791338f809cd1bf8e4fee6b540b36822434d0c6c6bc47162448deee3f77d425"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2f35f30e39e6aab8716f07790f646bdc6e4a853816cc49a95ef2a9016bf9ce6"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310204dfed5aa797128b65d63538a9837cbdd15da2a29a77d67eefa489edda26"},
+ {file = "tokenizers-0.13.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0f9b92ea052305166559f38498b3b0cae159caea712646648aaa272f7160963"},
+ {file = "tokenizers-0.13.3-cp38-cp38-win32.whl", hash = "sha256:9a3fa134896c3c1f0da6e762d15141fbff30d094067c8f1157b9fdca593b5806"},
+ {file = "tokenizers-0.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e7b0cdeace87fa9e760e6a605e0ae8fc14b7d72e9fc19c578116f7287bb873d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:00cee1e0859d55507e693a48fa4aef07060c4bb6bd93d80120e18fea9371c66d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a23ff602d0797cea1d0506ce69b27523b07e70f6dda982ab8cf82402de839088"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ce07445050b537d2696022dafb115307abdffd2a5c106f029490f84501ef97"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:280ffe95f50eaaf655b3a1dc7ff1d9cf4777029dbbc3e63a74e65a056594abc3"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97acfcec592f7e9de8cadcdcda50a7134423ac8455c0166b28c9ff04d227b371"},
+ {file = "tokenizers-0.13.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd7730c98a3010cd4f523465867ff95cd9d6430db46676ce79358f65ae39797b"},
+ {file = "tokenizers-0.13.3-cp39-cp39-win32.whl", hash = "sha256:48625a108029cb1ddf42e17a81b5a3230ba6888a70c9dc14e81bc319e812652d"},
+ {file = "tokenizers-0.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:bc0a6f1ba036e482db6453571c9e3e60ecd5489980ffd95d11dc9f960483d783"},
+ {file = "tokenizers-0.13.3.tar.gz", hash = "sha256:2e546dbb68b623008a5442353137fbb0123d311a6d7ba52f2667c8862a75af2e"},
+]
+
+[package.extras]
+dev = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
+docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
+testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests"]
+
[[package]]
name = "tomli"
version = "2.0.1"
@@ -2136,25 +3972,116 @@ files = [
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
+[[package]]
+name = "torch"
+version = "2.0.0"
+description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
+category = "main"
+optional = false
+python-versions = ">=3.8.0"
+files = [
+ {file = "torch-2.0.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7a9319a67294ef02459a19738bbfa8727bb5307b822dadd708bc2ccf6c901aca"},
+ {file = "torch-2.0.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:9f01fe1f6263f31bd04e1757946fd63ad531ae37f28bb2dbf66f5c826ee089f4"},
+ {file = "torch-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:527f4ae68df7b8301ee6b1158ca56350282ea633686537b30dbb5d7b4a52622a"},
+ {file = "torch-2.0.0-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:ce9b5a49bd513dff7950a5a07d6e26594dd51989cee05ba388b03e8e366fd5d5"},
+ {file = "torch-2.0.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:53e1c33c6896583cdb9a583693e22e99266444c4a43392dddc562640d39e542b"},
+ {file = "torch-2.0.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:09651bff72e439d004c991f15add0c397c66f98ab36fe60d5514b44e4da722e8"},
+ {file = "torch-2.0.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d439aec349c98f12819e8564b8c54008e4613dd4428582af0e6e14c24ca85870"},
+ {file = "torch-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:2802f84f021907deee7e9470ed10c0e78af7457ac9a08a6cd7d55adef835fede"},
+ {file = "torch-2.0.0-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:01858620f25f25e7a9ec4b547ff38e5e27c92d38ec4ccba9cfbfb31d7071ed9c"},
+ {file = "torch-2.0.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:9a2e53b5783ef5896a6af338b36d782f28e83c8ddfc2ac44b67b066d9d76f498"},
+ {file = "torch-2.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ec5fff2447663e369682838ff0f82187b4d846057ef4d119a8dea7772a0b17dd"},
+ {file = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:11b0384fe3c18c01b8fc5992e70fc519cde65e44c51cc87be1838c1803daf42f"},
+ {file = "torch-2.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:e54846aa63855298cfb1195487f032e413e7ac9cbfa978fda32354cc39551475"},
+ {file = "torch-2.0.0-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:cc788cbbbbc6eb4c90e52c550efd067586c2693092cf367c135b34893a64ae78"},
+ {file = "torch-2.0.0-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:d292640f0fd72b7a31b2a6e3b635eb5065fcbedd4478f9cad1a1e7a9ec861d35"},
+ {file = "torch-2.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6befaad784004b7af357e3d87fa0863c1f642866291f12a4c2af2de435e8ac5c"},
+ {file = "torch-2.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a83b26bd6ae36fbf5fee3d56973d9816e2002e8a3b7d9205531167c28aaa38a7"},
+ {file = "torch-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:c7e67195e1c3e33da53954b026e89a8e1ff3bc1aeb9eb32b677172d4a9b5dcbf"},
+ {file = "torch-2.0.0-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:6e0b97beb037a165669c312591f242382e9109a240e20054d5a5782d9236cad0"},
+ {file = "torch-2.0.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:297a4919aff1c0f98a58ebe969200f71350a1d4d4f986dbfd60c02ffce780e99"},
+]
+
+[package.dependencies]
+filelock = "*"
+jinja2 = "*"
+networkx = "*"
+nvidia-cublas-cu11 = {version = "11.10.3.66", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cuda-cupti-cu11 = {version = "11.7.101", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cuda-nvrtc-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cufft-cu11 = {version = "10.9.0.58", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-curand-cu11 = {version = "10.2.10.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cusolver-cu11 = {version = "11.4.0.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cusparse-cu11 = {version = "11.7.4.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-nccl-cu11 = {version = "2.14.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-nvtx-cu11 = {version = "11.7.91", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+sympy = "*"
+triton = {version = "2.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+typing-extensions = "*"
+
+[package.extras]
+opt-einsum = ["opt-einsum (>=3.3)"]
+
+[[package]]
+name = "torchvision"
+version = "0.15.1"
+description = "image and video datasets and models for torch deep learning"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "torchvision-0.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc10d48e9a60d006d0c1b48dea87f1ec9b63d856737d592f7c5c44cd87f3f4b7"},
+ {file = "torchvision-0.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3708d3410fdcaf6280e358cda9de2a4ab06cc0b4c0fd9aeeac550ec2563a887e"},
+ {file = "torchvision-0.15.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:d4de10c837f1493c1c54344388e300a06c96914c6cc55fcb2527c21f2f010bbd"},
+ {file = "torchvision-0.15.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:b82fcc5abc9b5c96495c76596a1573025cc1e09d97d2d6fda717c44b9ca45881"},
+ {file = "torchvision-0.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:c84e97d8cc4fe167d87adad0a2a6424cff90544365545b20669bc50e6ea46875"},
+ {file = "torchvision-0.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:97b90eb3b7333a31d049c4ccfd1064361e8491874959d38f466af64d67418cef"},
+ {file = "torchvision-0.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b60e1c839ae2a071befbba69b17468d67feafdf576e90ff9645bfbee998de17"},
+ {file = "torchvision-0.15.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:13f71a3372d9168b01481a754ebaa171207f3dc455bf2fd86906c69222443738"},
+ {file = "torchvision-0.15.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b2e8394726009090b40f6cc3a95cc878cc011dfac3d8e7a6060c79213d360880"},
+ {file = "torchvision-0.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:2852f501189483187ce9eb0ccd01b3f4f0918d29057e4a18b3cce8dad9a8a964"},
+ {file = "torchvision-0.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e5861baaeea87d19b6fd7d131e11a4a6bd17be14234c490a259bb360775e9520"},
+ {file = "torchvision-0.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e714f362b9d8217cf4d68509b679ebc9ddf128cfe80f6c1def8e3f8a18466e75"},
+ {file = "torchvision-0.15.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:43624accad1e47f16824be4db37ad678dd89326ad90b69c9c6363eeb22b9467e"},
+ {file = "torchvision-0.15.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7fe9b0cd3311b0db9e6d45ffab594ced06418fa4e2aa15eb2e60d55e5c51135c"},
+ {file = "torchvision-0.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:b45324ea4911a23a4b00b5a15cdbe36d47f93137206dab9f8c606d81b69dd3a7"},
+ {file = "torchvision-0.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1dfdec7c7df967330bba3341a781e0c047d4e0163e67164a9918500362bf7d91"},
+ {file = "torchvision-0.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c153710186cec0338d4fff411459a57ddbc8504436123ca73b3f0bdc26ff918c"},
+ {file = "torchvision-0.15.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:ff4e650aa601f32ab97bce06704868dd2baad69ca4d454fa1f0012a51199f2bc"},
+ {file = "torchvision-0.15.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e9b4bb2a15849391df0415d2f76dd36e6528e4253f7b69322b7a0d682535544b"},
+ {file = "torchvision-0.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:21e6beb69e77ef6575c4fdd0ab332b96e8a7f144eee0d333acff469c827a4b5e"},
+]
+
+[package.dependencies]
+numpy = "*"
+pillow = ">=5.3.0,<8.3.0 || >=8.4.0"
+requests = "*"
+torch = "2.0.0"
+
+[package.extras]
+scipy = ["scipy"]
+
[[package]]
name = "tornado"
-version = "6.2"
+version = "6.3.1"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
category = "dev"
optional = false
-python-versions = ">= 3.7"
+python-versions = ">= 3.8"
files = [
- {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"},
- {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"},
- {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"},
- {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"},
- {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"},
- {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"},
- {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"},
- {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"},
- {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"},
- {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"},
- {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"},
+ {file = "tornado-6.3.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:db181eb3df8738613ff0a26f49e1b394aade05034b01200a63e9662f347d4415"},
+ {file = "tornado-6.3.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b4e7b956f9b5e6f9feb643ea04f07e7c6b49301e03e0023eedb01fa8cf52f579"},
+ {file = "tornado-6.3.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9661aa8bc0e9d83d757cd95b6f6d1ece8ca9fd1ccdd34db2de381e25bf818233"},
+ {file = "tornado-6.3.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81c17e0cc396908a5e25dc8e9c5e4936e6dfd544c9290be48bd054c79bcad51e"},
+ {file = "tornado-6.3.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a27a1cfa9997923f80bdd962b3aab048ac486ad8cfb2f237964f8ab7f7eb824b"},
+ {file = "tornado-6.3.1-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d7117f3c7ba5d05813b17a1f04efc8e108a1b811ccfddd9134cc68553c414864"},
+ {file = "tornado-6.3.1-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:ffdce65a281fd708da5a9def3bfb8f364766847fa7ed806821a69094c9629e8a"},
+ {file = "tornado-6.3.1-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:90f569a35a8ec19bde53aa596952071f445da678ec8596af763b9b9ce07605e6"},
+ {file = "tornado-6.3.1-cp38-abi3-win32.whl", hash = "sha256:3455133b9ff262fd0a75630af0a8ee13564f25fb4fd3d9ce239b8a7d3d027bf8"},
+ {file = "tornado-6.3.1-cp38-abi3-win_amd64.whl", hash = "sha256:1285f0691143f7ab97150831455d4db17a267b59649f7bd9700282cba3d5e771"},
+ {file = "tornado-6.3.1.tar.gz", hash = "sha256:5e2f49ad371595957c50e42dd7e5c14d64a6843a3cf27352b69c706d1b5918af"},
]
[[package]]
@@ -2194,6 +4121,103 @@ files = [
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"]
+[[package]]
+name = "transformers"
+version = "4.28.1"
+description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow"
+category = "main"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "transformers-4.28.1-py3-none-any.whl", hash = "sha256:f30a006220d0475789ac0e7c874f51bf5143956797616d89975b637883ce0be6"},
+ {file = "transformers-4.28.1.tar.gz", hash = "sha256:7334f8730cff7ac31d9ba5c12f2113fcb7a7a5b61eeb5dbbdb162117c3aaa2d1"},
+]
+
+[package.dependencies]
+filelock = "*"
+huggingface-hub = ">=0.11.0,<1.0"
+numpy = ">=1.17"
+packaging = ">=20.0"
+pyyaml = ">=5.1"
+regex = "!=2019.12.17"
+requests = "*"
+tokenizers = ">=0.11.1,<0.11.3 || >0.11.3,<0.14"
+tqdm = ">=4.27"
+
+[package.extras]
+accelerate = ["accelerate (>=0.10.0)"]
+all = ["Pillow", "accelerate (>=0.10.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"]
+audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+codecarbon = ["codecarbon (==1.2.0)"]
+deepspeed = ["accelerate (>=0.10.0)", "deepspeed (>=0.8.3)"]
+deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.10.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.8.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"]
+dev = ["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"]
+dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"]
+dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"]
+docs = ["Pillow", "accelerate (>=0.10.0)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"]
+docs-specific = ["hf-doc-builder"]
+fairscale = ["fairscale (>0.3)"]
+flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"]
+flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+ftfy = ["ftfy"]
+integrations = ["optuna", "ray[tune]", "sigopt"]
+ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "rhoknp (>=1.1.0)", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"]
+modelcreation = ["cookiecutter (==1.7.3)"]
+natten = ["natten (>=0.14.6)"]
+onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"]
+onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"]
+optuna = ["optuna"]
+quality = ["GitPython (<3.1.19)", "black (>=23.1,<24.0)", "datasets (!=2.5.0)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (>=0.0.241,<=0.0.259)"]
+ray = ["ray[tune]"]
+retrieval = ["datasets (!=2.5.0)", "faiss-cpu"]
+sagemaker = ["sagemaker (>=2.31.0)"]
+sentencepiece = ["protobuf (<=3.20.2)", "sentencepiece (>=0.1.91,!=0.1.92)"]
+serving = ["fastapi", "pydantic", "starlette", "uvicorn"]
+sigopt = ["sigopt"]
+sklearn = ["scikit-learn"]
+speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
+testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.2)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "timeout-decorator"]
+tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"]
+tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.4,<2.13)", "tensorflow-text (<2.13)", "tf2onnx"]
+tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
+timm = ["timm"]
+tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"]
+torch = ["torch (>=1.9,!=1.12.0)"]
+torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
+torch-vision = ["Pillow", "torchvision"]
+torchhub = ["filelock", "huggingface-hub (>=0.11.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.2)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"]
+video = ["av (==9.2.0)", "decord (==0.6.0)"]
+vision = ["Pillow"]
+
+[[package]]
+name = "triton"
+version = "2.0.0"
+description = "A language and compiler for custom Deep Learning operations"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "triton-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f05a7e64e4ca0565535e3d5d3405d7e49f9d308505bb7773d21fb26a4c008c2"},
+ {file = "triton-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb4b99ca3c6844066e516658541d876c28a5f6e3a852286bbc97ad57134827fd"},
+ {file = "triton-2.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47b4d70dc92fb40af553b4460492c31dc7d3a114a979ffb7a5cdedb7eb546c08"},
+ {file = "triton-2.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fedce6a381901b1547e0e7e1f2546e4f65dca6d91e2d8a7305a2d1f5551895be"},
+ {file = "triton-2.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75834f27926eab6c7f00ce73aaf1ab5bfb9bec6eb57ab7c0bfc0a23fac803b4c"},
+ {file = "triton-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0117722f8c2b579cd429e0bee80f7731ae05f63fe8e9414acd9a679885fcbf42"},
+ {file = "triton-2.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcd9be5d0c2e45d2b7e6ddc6da20112b6862d69741576f9c3dbaf941d745ecae"},
+ {file = "triton-2.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a0d2c3fc2eab4ba71384f2e785fbfd47aa41ae05fa58bf12cb31dcbd0aeceb"},
+ {file = "triton-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c47b72c72693198163ece9d90a721299e4fb3b8e24fd13141e384ad952724f"},
+]
+
+[package.dependencies]
+cmake = "*"
+filelock = "*"
+lit = "*"
+torch = "*"
+
+[package.extras]
+tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)"]
+tutorials = ["matplotlib", "pandas", "tabulate"]
+
[[package]]
name = "typer"
version = "0.7.0"
@@ -2255,6 +4279,46 @@ files = [
mypy-extensions = ">=0.3.0"
typing-extensions = ">=3.7.4"
+[[package]]
+name = "unstructured"
+version = "0.5.13"
+description = "A library that prepares raw documents for downstream ML tasks."
+category = "main"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "unstructured-0.5.13-py3-none-any.whl", hash = "sha256:44ddc2848f1009fd64ced91a9b52d0e9dd15dee837f34a898ecd95863236b880"},
+ {file = "unstructured-0.5.13.tar.gz", hash = "sha256:b25443a47353297bda5eec64615215111e07c2507771cf8ad5471d04ac9026b1"},
+]
+
+[package.dependencies]
+argilla = "*"
+certifi = ">=2022.12.07"
+lxml = "*"
+markdown = "*"
+msg-parser = "*"
+nltk = "*"
+openpyxl = "*"
+pandas = "*"
+pillow = "*"
+pypandoc = "*"
+python-docx = "*"
+python-magic = "*"
+python-pptx = "*"
+requests = "*"
+
+[package.extras]
+azure = ["adlfs", "fsspec"]
+github = ["pygithub (==1.57.0)"]
+gitlab = ["python-gitlab"]
+google-drive = ["google-api-python-client", "protobuf (<3.21)"]
+huggingface = ["langdetect", "sacremoses", "sentencepiece", "torch", "transformers"]
+local-inference = ["unstructured-inference (==0.3.2)"]
+reddit = ["praw"]
+s3 = ["fsspec", "s3fs"]
+slack = ["slack-sdk"]
+wikipedia = ["wikipedia"]
+
[[package]]
name = "uritemplate"
version = "4.1.1"
@@ -2298,11 +4362,98 @@ files = [
[package.dependencies]
click = ">=7.0"
+colorama = {version = ">=0.4", optional = true, markers = "sys_platform == \"win32\" and extra == \"standard\""}
h11 = ">=0.8"
+httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""}
+python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""}
+uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""}
+watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""}
+websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""}
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
+[[package]]
+name = "uvloop"
+version = "0.17.0"
+description = "Fast implementation of asyncio event loop on top of libuv"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718"},
+ {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c"},
+ {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d"},
+ {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024"},
+ {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa"},
+ {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811"},
+ {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c"},
+ {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e"},
+ {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539"},
+ {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4"},
+ {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05"},
+ {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376"},
+ {file = "uvloop-0.17.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:23609ca361a7fc587031429fa25ad2ed7242941adec948f9d10c045bfecab06b"},
+ {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2deae0b0fb00a6af41fe60a675cec079615b01d68beb4cc7b722424406b126a8"},
+ {file = "uvloop-0.17.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45cea33b208971e87a31c17622e4b440cac231766ec11e5d22c76fab3bf9df62"},
+ {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b09e0f0ac29eee0451d71798878eae5a4e6a91aa275e114037b27f7db72702d"},
+ {file = "uvloop-0.17.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbbaf9da2ee98ee2531e0c780455f2841e4675ff580ecf93fe5c48fe733b5667"},
+ {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738"},
+ {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20"},
+ {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f"},
+ {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595"},
+ {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578"},
+ {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474"},
+ {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b"},
+ {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c"},
+ {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8"},
+ {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c"},
+ {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9"},
+ {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded"},
+ {file = "uvloop-0.17.0.tar.gz", hash = "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1"},
+]
+
+[package.extras]
+dev = ["Cython (>=0.29.32,<0.30.0)", "Sphinx (>=4.1.2,<4.2.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)", "pytest (>=3.6.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
+test = ["Cython (>=0.29.32,<0.30.0)", "aiohttp", "flake8 (>=3.9.2,<3.10.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=22.0.0,<22.1.0)", "pycodestyle (>=2.7.0,<2.8.0)"]
+
+[[package]]
+name = "watchfiles"
+version = "0.19.0"
+description = "Simple, modern and high performance file watching and code reload in python."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"},
+ {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"},
+ {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"},
+ {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"},
+ {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"},
+]
+
+[package.dependencies]
+anyio = ">=3.0.0"
+
[[package]]
name = "wcwidth"
version = "0.2.6"
@@ -2315,88 +4466,195 @@ files = [
{file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"},
]
+[[package]]
+name = "websockets"
+version = "11.0.2"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "websockets-11.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:580cc95c58118f8c39106be71e24d0b7e1ad11a155f40a2ee687f99b3e5e432e"},
+ {file = "websockets-11.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:143782041e95b63083b02107f31cda999f392903ae331de1307441f3a4557d51"},
+ {file = "websockets-11.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8df63dcd955eb6b2e371d95aacf8b7c535e482192cff1b6ce927d8f43fb4f552"},
+ {file = "websockets-11.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9b2dced5cbbc5094678cc1ec62160f7b0fe4defd601cd28a36fde7ee71bbb5"},
+ {file = "websockets-11.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0eeeea3b01c97fd3b5049a46c908823f68b59bf0e18d79b231d8d6764bc81ee"},
+ {file = "websockets-11.0.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:502683c5dedfc94b9f0f6790efb26aa0591526e8403ad443dce922cd6c0ec83b"},
+ {file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3cc3e48b6c9f7df8c3798004b9c4b92abca09eeea5e1b0a39698f05b7a33b9d"},
+ {file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:808b8a33c961bbd6d33c55908f7c137569b09ea7dd024bce969969aa04ecf07c"},
+ {file = "websockets-11.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:34a6f8996964ccaa40da42ee36aa1572adcb1e213665e24aa2f1037da6080909"},
+ {file = "websockets-11.0.2-cp310-cp310-win32.whl", hash = "sha256:8f24cd758cbe1607a91b720537685b64e4d39415649cac9177cd1257317cf30c"},
+ {file = "websockets-11.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:3b87cd302f08ea9e74fdc080470eddbed1e165113c1823fb3ee6328bc40ca1d3"},
+ {file = "websockets-11.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3565a8f8c7bdde7c29ebe46146bd191290413ee6f8e94cf350609720c075b0a1"},
+ {file = "websockets-11.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f97e03d4d5a4f0dca739ea274be9092822f7430b77d25aa02da6775e490f6846"},
+ {file = "websockets-11.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f392587eb2767afa8a34e909f2fec779f90b630622adc95d8b5e26ea8823cb8"},
+ {file = "websockets-11.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7742cd4524622cc7aa71734b51294644492a961243c4fe67874971c4d3045982"},
+ {file = "websockets-11.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46dda4bc2030c335abe192b94e98686615f9274f6b56f32f2dd661fb303d9d12"},
+ {file = "websockets-11.0.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6b2bfa1d884c254b841b0ff79373b6b80779088df6704f034858e4d705a4802"},
+ {file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1df2413266bf48430ef2a752c49b93086c6bf192d708e4a9920544c74cd2baa6"},
+ {file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf45d273202b0c1cec0f03a7972c655b93611f2e996669667414557230a87b88"},
+ {file = "websockets-11.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a09cce3dacb6ad638fdfa3154d9e54a98efe7c8f68f000e55ca9c716496ca67"},
+ {file = "websockets-11.0.2-cp311-cp311-win32.whl", hash = "sha256:2174a75d579d811279855df5824676d851a69f52852edb0e7551e0eeac6f59a4"},
+ {file = "websockets-11.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:c78ca3037a954a4209b9f900e0eabbc471fb4ebe96914016281df2c974a93e3e"},
+ {file = "websockets-11.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2100b02d1aaf66dc48ff1b2a72f34f6ebc575a02bc0350cc8e9fbb35940166"},
+ {file = "websockets-11.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dca9708eea9f9ed300394d4775beb2667288e998eb6f542cdb6c02027430c599"},
+ {file = "websockets-11.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:320ddceefd2364d4afe6576195201a3632a6f2e6d207b0c01333e965b22dbc84"},
+ {file = "websockets-11.0.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a573c8d71b7af937852b61e7ccb37151d719974146b5dc734aad350ef55a02"},
+ {file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:13bd5bebcd16a4b5e403061b8b9dcc5c77e7a71e3c57e072d8dff23e33f70fba"},
+ {file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:95c09427c1c57206fe04277bf871b396476d5a8857fa1b99703283ee497c7a5d"},
+ {file = "websockets-11.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2eb042734e710d39e9bc58deab23a65bd2750e161436101488f8af92f183c239"},
+ {file = "websockets-11.0.2-cp37-cp37m-win32.whl", hash = "sha256:5875f623a10b9ba154cb61967f940ab469039f0b5e61c80dd153a65f024d9fb7"},
+ {file = "websockets-11.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:634239bc844131863762865b75211a913c536817c0da27f691400d49d256df1d"},
+ {file = "websockets-11.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3178d965ec204773ab67985a09f5696ca6c3869afeed0bb51703ea404a24e975"},
+ {file = "websockets-11.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:955fcdb304833df2e172ce2492b7b47b4aab5dcc035a10e093d911a1916f2c87"},
+ {file = "websockets-11.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb46d2c7631b2e6f10f7c8bac7854f7c5e5288f024f1c137d4633c79ead1e3c0"},
+ {file = "websockets-11.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25aae96c1060e85836552a113495db6d857400288161299d77b7b20f2ac569f2"},
+ {file = "websockets-11.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2abeeae63154b7f63d9f764685b2d299e9141171b8b896688bd8baec6b3e2303"},
+ {file = "websockets-11.0.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:daa1e8ea47507555ed7a34f8b49398d33dff5b8548eae3de1dc0ef0607273a33"},
+ {file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:954eb789c960fa5daaed3cfe336abc066941a5d456ff6be8f0e03dd89886bb4c"},
+ {file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3ffe251a31f37e65b9b9aca5d2d67fd091c234e530f13d9dce4a67959d5a3fba"},
+ {file = "websockets-11.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adf6385f677ed2e0b021845b36f55c43f171dab3a9ee0ace94da67302f1bc364"},
+ {file = "websockets-11.0.2-cp38-cp38-win32.whl", hash = "sha256:aa7b33c1fb2f7b7b9820f93a5d61ffd47f5a91711bc5fa4583bbe0c0601ec0b2"},
+ {file = "websockets-11.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:220d5b93764dd70d7617f1663da64256df7e7ea31fc66bc52c0e3750ee134ae3"},
+ {file = "websockets-11.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fb4480556825e4e6bf2eebdbeb130d9474c62705100c90e59f2f56459ddab42"},
+ {file = "websockets-11.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec00401846569aaf018700249996143f567d50050c5b7b650148989f956547af"},
+ {file = "websockets-11.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87c69f50281126dcdaccd64d951fb57fbce272578d24efc59bce72cf264725d0"},
+ {file = "websockets-11.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:232b6ba974f5d09b1b747ac232f3a3d8f86de401d7b565e837cc86988edf37ac"},
+ {file = "websockets-11.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392d409178db1e46d1055e51cc850136d302434e12d412a555e5291ab810f622"},
+ {file = "websockets-11.0.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4fe2442091ff71dee0769a10449420fd5d3b606c590f78dd2b97d94b7455640"},
+ {file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ede13a6998ba2568b21825809d96e69a38dc43184bdeebbde3699c8baa21d015"},
+ {file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4c54086b2d2aec3c3cb887ad97e9c02c6be9f1d48381c7419a4aa932d31661e4"},
+ {file = "websockets-11.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e37a76ccd483a6457580077d43bc3dfe1fd784ecb2151fcb9d1c73f424deaeba"},
+ {file = "websockets-11.0.2-cp39-cp39-win32.whl", hash = "sha256:d1881518b488a920434a271a6e8a5c9481a67c4f6352ebbdd249b789c0467ddc"},
+ {file = "websockets-11.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:25e265686ea385f22a00cc2b719b880797cd1bb53b46dbde969e554fb458bfde"},
+ {file = "websockets-11.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ce69f5c742eefd039dce8622e99d811ef2135b69d10f9aa79fbf2fdcc1e56cd7"},
+ {file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b985ba2b9e972cf99ddffc07df1a314b893095f62c75bc7c5354a9c4647c6503"},
+ {file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b52def56d2a26e0e9c464f90cadb7e628e04f67b0ff3a76a4d9a18dfc35e3dd"},
+ {file = "websockets-11.0.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70a438ef2a22a581d65ad7648e949d4ccd20e3c8ed7a90bbc46df4e60320891"},
+ {file = "websockets-11.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:752fbf420c71416fb1472fec1b4cb8631c1aa2be7149e0a5ba7e5771d75d2bb9"},
+ {file = "websockets-11.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dd906b0cdc417ea7a5f13bb3c6ca3b5fd563338dc596996cb0fdd7872d691c0a"},
+ {file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e79065ff6549dd3c765e7916067e12a9c91df2affea0ac51bcd302aaf7ad207"},
+ {file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46388a050d9e40316e58a3f0838c63caacb72f94129eb621a659a6e49bad27ce"},
+ {file = "websockets-11.0.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c7de298371d913824f71b30f7685bb07ad13969c79679cca5b1f7f94fec012f"},
+ {file = "websockets-11.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6d872c972c87c393e6a49c1afbdc596432df8c06d0ff7cd05aa18e885e7cfb7c"},
+ {file = "websockets-11.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b444366b605d2885f0034dd889faf91b4b47668dd125591e2c64bfde611ac7e1"},
+ {file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b967a4849db6b567dec3f7dd5d97b15ce653e3497b8ce0814e470d5e074750"},
+ {file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2acdc82099999e44fa7bd8c886f03c70a22b1d53ae74252f389be30d64fd6004"},
+ {file = "websockets-11.0.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:518ed6782d9916c5721ebd61bb7651d244178b74399028302c8617d0620af291"},
+ {file = "websockets-11.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:58477b041099bb504e1a5ddd8aa86302ed1d5c6995bdd3db2b3084ef0135d277"},
+ {file = "websockets-11.0.2-py3-none-any.whl", hash = "sha256:5004c087d17251938a52cce21b3dbdabeecbbe432ce3f5bbbf15d8692c36eac9"},
+ {file = "websockets-11.0.2.tar.gz", hash = "sha256:b1a69701eb98ed83dd099de4a686dc892c413d974fa31602bc00aca7cb988ac9"},
+]
+
+[[package]]
+name = "wheel"
+version = "0.40.0"
+description = "A built-package format for Python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "wheel-0.40.0-py3-none-any.whl", hash = "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247"},
+ {file = "wheel-0.40.0.tar.gz", hash = "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873"},
+]
+
+[package.extras]
+test = ["pytest (>=6.0.0)"]
+
+[[package]]
+name = "xlsxwriter"
+version = "3.1.0"
+description = "A Python module for creating Excel XLSX files."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "XlsxWriter-3.1.0-py3-none-any.whl", hash = "sha256:b70a147d36235d1ee835cfd037396f789db1f76740a0e5c917d54137169341de"},
+ {file = "XlsxWriter-3.1.0.tar.gz", hash = "sha256:02913b50b74c00f165933d5da3e3a02cab4204cb4932722a1b342c5c71034122"},
+]
+
[[package]]
name = "yarl"
-version = "1.8.2"
+version = "1.9.2"
description = "Yet another URL library"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"},
- {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"},
- {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"},
- {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"},
- {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"},
- {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"},
- {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"},
- {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"},
- {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"},
- {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"},
- {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"},
- {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"},
- {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"},
- {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"},
- {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"},
- {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"},
- {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"},
- {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"},
- {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"},
- {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"},
- {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"},
- {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"},
- {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"},
- {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"},
- {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"},
- {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"},
- {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"},
- {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"},
- {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"},
- {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"},
- {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"},
- {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"},
- {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"},
- {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"},
- {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"},
- {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"},
- {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"},
- {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"},
- {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"},
- {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"},
- {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"},
- {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"},
- {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"},
- {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"},
- {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"},
- {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"},
- {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"},
- {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"},
- {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"},
- {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"},
- {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"},
- {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"},
- {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"},
- {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"},
- {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"},
- {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"},
- {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"},
- {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"},
- {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"},
- {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"},
- {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"},
- {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"},
- {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"},
- {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"},
- {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"},
- {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"},
- {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"},
- {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"},
- {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"},
- {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"},
- {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"},
- {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"},
- {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"},
- {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
+ {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
+ {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
+ {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
+ {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
+ {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
+ {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
+ {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
+ {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
+ {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
+ {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
+ {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
+ {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
+ {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
+ {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
+ {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
+ {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
+ {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
+ {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
+ {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
+ {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
+ {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
+ {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
+ {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
+ {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
+ {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
+ {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
]
[package.dependencies]
@@ -2407,7 +4665,7 @@ multidict = ">=4.0"
name = "zipp"
version = "3.15.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "dev"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -2419,6 +4677,65 @@ files = [
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+[[package]]
+name = "zstandard"
+version = "0.21.0"
+description = "Zstandard bindings for Python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "zstandard-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:649a67643257e3b2cff1c0a73130609679a5673bf389564bc6d4b164d822a7ce"},
+ {file = "zstandard-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:144a4fe4be2e747bf9c646deab212666e39048faa4372abb6a250dab0f347a29"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b72060402524ab91e075881f6b6b3f37ab715663313030d0ce983da44960a86f"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8257752b97134477fb4e413529edaa04fc0457361d304c1319573de00ba796b1"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c053b7c4cbf71cc26808ed67ae955836232f7638444d709bfc302d3e499364fa"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2769730c13638e08b7a983b32cb67775650024632cd0476bf1ba0e6360f5ac7d"},
+ {file = "zstandard-0.21.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7d3bc4de588b987f3934ca79140e226785d7b5e47e31756761e48644a45a6766"},
+ {file = "zstandard-0.21.0-cp310-cp310-win32.whl", hash = "sha256:67829fdb82e7393ca68e543894cd0581a79243cc4ec74a836c305c70a5943f07"},
+ {file = "zstandard-0.21.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6048a287f8d2d6e8bc67f6b42a766c61923641dd4022b7fd3f7439e17ba5a4d"},
+ {file = "zstandard-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f2afab2c727b6a3d466faee6974a7dad0d9991241c498e7317e5ccf53dbc766"},
+ {file = "zstandard-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff0852da2abe86326b20abae912d0367878dd0854b8931897d44cfeb18985472"},
+ {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12fa383e315b62630bd407477d750ec96a0f438447d0e6e496ab67b8b451d39"},
+ {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1b9703fe2e6b6811886c44052647df7c37478af1b4a1a9078585806f42e5b15"},
+ {file = "zstandard-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df28aa5c241f59a7ab524f8ad8bb75d9a23f7ed9d501b0fed6d40ec3064784e8"},
+ {file = "zstandard-0.21.0-cp311-cp311-win32.whl", hash = "sha256:0aad6090ac164a9d237d096c8af241b8dcd015524ac6dbec1330092dba151657"},
+ {file = "zstandard-0.21.0-cp311-cp311-win_amd64.whl", hash = "sha256:48b6233b5c4cacb7afb0ee6b4f91820afbb6c0e3ae0fa10abbc20000acdf4f11"},
+ {file = "zstandard-0.21.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e7d560ce14fd209db6adacce8908244503a009c6c39eee0c10f138996cd66d3e"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e6e131a4df2eb6f64961cea6f979cdff22d6e0d5516feb0d09492c8fd36f3bc"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1e0c62a67ff425927898cf43da2cf6b852289ebcc2054514ea9bf121bec10a5"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1545fb9cb93e043351d0cb2ee73fa0ab32e61298968667bb924aac166278c3fc"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe6c821eb6870f81d73bf10e5deed80edcac1e63fbc40610e61f340723fd5f7c"},
+ {file = "zstandard-0.21.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ddb086ea3b915e50f6604be93f4f64f168d3fc3cef3585bb9a375d5834392d4f"},
+ {file = "zstandard-0.21.0-cp37-cp37m-win32.whl", hash = "sha256:57ac078ad7333c9db7a74804684099c4c77f98971c151cee18d17a12649bc25c"},
+ {file = "zstandard-0.21.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1243b01fb7926a5a0417120c57d4c28b25a0200284af0525fddba812d575f605"},
+ {file = "zstandard-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea68b1ba4f9678ac3d3e370d96442a6332d431e5050223626bdce748692226ea"},
+ {file = "zstandard-0.21.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8070c1cdb4587a8aa038638acda3bd97c43c59e1e31705f2766d5576b329e97c"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4af612c96599b17e4930fe58bffd6514e6c25509d120f4eae6031b7595912f85"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff891e37b167bc477f35562cda1248acc115dbafbea4f3af54ec70821090965"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9fec02ce2b38e8b2e86079ff0b912445495e8ab0b137f9c0505f88ad0d61296"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdbe350691dec3078b187b8304e6a9c4d9db3eb2d50ab5b1d748533e746d099"},
+ {file = "zstandard-0.21.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b69cccd06a4a0a1d9fb3ec9a97600055cf03030ed7048d4bcb88c574f7895773"},
+ {file = "zstandard-0.21.0-cp38-cp38-win32.whl", hash = "sha256:9980489f066a391c5572bc7dc471e903fb134e0b0001ea9b1d3eff85af0a6f1b"},
+ {file = "zstandard-0.21.0-cp38-cp38-win_amd64.whl", hash = "sha256:0e1e94a9d9e35dc04bf90055e914077c80b1e0c15454cc5419e82529d3e70728"},
+ {file = "zstandard-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2d61675b2a73edcef5e327e38eb62bdfc89009960f0e3991eae5cc3d54718de"},
+ {file = "zstandard-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25fbfef672ad798afab12e8fd204d122fca3bc8e2dcb0a2ba73bf0a0ac0f5f07"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62957069a7c2626ae80023998757e27bd28d933b165c487ab6f83ad3337f773d"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14e10ed461e4807471075d4b7a2af51f5234c8f1e2a0c1d37d5ca49aaaad49e8"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cff89a036c639a6a9299bf19e16bfb9ac7def9a7634c52c257166db09d950e7"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52b2b5e3e7670bd25835e0e0730a236f2b0df87672d99d3bf4bf87248aa659fb"},
+ {file = "zstandard-0.21.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b1367da0dde8ae5040ef0413fb57b5baeac39d8931c70536d5f013b11d3fc3a5"},
+ {file = "zstandard-0.21.0-cp39-cp39-win32.whl", hash = "sha256:db62cbe7a965e68ad2217a056107cc43d41764c66c895be05cf9c8b19578ce9c"},
+ {file = "zstandard-0.21.0-cp39-cp39-win_amd64.whl", hash = "sha256:a8d200617d5c876221304b0e3fe43307adde291b4a897e7b0617a61611dfff6a"},
+ {file = "zstandard-0.21.0.tar.gz", hash = "sha256:f08e3a10d01a247877e4cb61a82a319ea746c356a3786558bed2481e6c405546"},
+]
+
+[package.dependencies]
+cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""}
+
+[package.extras]
+cffi = ["cffi (>=1.11)"]
+
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
diff --git a/pyproject.toml b/pyproject.toml
index 4150cf0d3..96c4e087d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
-version = "0.0.46"
+version = "0.0.58"
description = "A Python package with a built-in web application"
authors = ["Logspace "]
maintainers = [
@@ -32,12 +32,34 @@ gunicorn = "^20.1.0"
langchain = "~0.0.113"
openai = "^0.27.2"
types-pyyaml = "^6.0.12.8"
+dill = "^0.3.6"
+pandas = "^1.5.3"
+chromadb = "^0.3.21"
+huggingface-hub = "^0.13.3"
+rich = "^13.3.3"
+llama-cpp-python = "0.1.23"
+networkx = "^3.1"
+unstructured = "^0.5.11"
+pypdf = "^3.7.1"
+lxml = "^4.9.2"
+pysrt = "^1.1.2"
+fake-useragent = "^1.1.3"
+docstring-parser = "^0.15"
+psycopg2-binary = "^2.9.6"
+pyarrow = "^11.0.0"
[tool.poetry.group.dev.dependencies]
black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
ruff = "^0.0.254"
+httpx = "^0.23.3"
+pytest = "^7.2.2"
+types-requests = "^2.28.11"
+requests = "^2.28.0"
+
+[tool.ruff]
+line-length = 120
[build-system]
requires = ["poetry-core"]
diff --git a/scripts/deploy_langflow_gcp.sh b/scripts/deploy_langflow_gcp.sh
new file mode 100644
index 000000000..2c3dc0420
--- /dev/null
+++ b/scripts/deploy_langflow_gcp.sh
@@ -0,0 +1,89 @@
+# Set the VM, image, and networking configuration
+VM_NAME="langflow-dev"
+IMAGE_FAMILY="debian-11"
+IMAGE_PROJECT="debian-cloud"
+BOOT_DISK_SIZE="100GB"
+ZONE="us-central1-a"
+REGION="us-central1"
+VPC_NAME="default"
+SUBNET_NAME="default"
+SUBNET_RANGE="10.128.0.0/20"
+NAT_GATEWAY_NAME="nat-gateway"
+CLOUD_ROUTER_NAME="nat-client"
+
+# Set the GCP project's compute region
+gcloud config set compute/region $REGION
+
+# Check if the VPC exists, and create it if not
+vpc_exists=$(gcloud compute networks list --filter="name=$VPC_NAME" --format="value(name)")
+if [[ -z "$vpc_exists" ]]; then
+ gcloud compute networks create $VPC_NAME --subnet-mode=custom
+fi
+
+# Check if the subnet exists, and create it if not
+subnet_exists=$(gcloud compute networks subnets list --filter="name=$SUBNET_NAME AND region=$REGION" --format="value(name)")
+if [[ -z "$subnet_exists" ]]; then
+ gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
+fi
+
+# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
+firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
+if [[ -z "$firewall_8080_exists" ]]; then
+ gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
+fi
+
+# Create a firewall rule to allow IAP traffic
+firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
+if [[ -z "$firewall_iap_exists" ]]; then
+ gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
+fi
+
+# Define the startup script as a multiline Bash here-doc
+STARTUP_SCRIPT=$(cat <<'EOF'
+#!/bin/bash
+
+# Update and upgrade the system
+apt -y update
+apt -y upgrade
+
+# Install Python 3 pip, Langflow, and Nginx
+apt -y install python3-pip
+pip install langflow
+apt-get -y install nginx
+
+# Configure Nginx for Langflow
+touch /etc/nginx/sites-available/langflow-app
+echo "server {
+ listen 0.0.0.0:8080;
+
+ location / {
+ proxy_pass http://127.0.0.1:7860;
+ proxy_set_header Host "\$host";
+ proxy_set_header X-Real-IP "\$remote_addr";
+ proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
+ }
+}" >> /etc/nginx/sites-available/langflow-app
+ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
+sudo nginx -t
+sudo systemctl restart nginx
+langflow
+EOF
+)
+
+# Create a temporary file to store the startup script
+tempfile=$(mktemp)
+echo "$STARTUP_SCRIPT" > $tempfile
+
+# Create the VM instance with the specified configuration and startup script
+gcloud compute instances create $VM_NAME \
+ --image-family $IMAGE_FAMILY \
+ --image-project $IMAGE_PROJECT \
+ --boot-disk-size $BOOT_DISK_SIZE \
+ --machine-type=n1-standard-4 \
+ --metadata-from-file startup-script=$tempfile \
+ --zone $ZONE \
+ --network $VPC_NAME \
+ --subnet $SUBNET_NAME
+
+# Remove the temporary file after the VM is created
+rm $tempfile
diff --git a/scripts/deploy_langflow_gcp_spot.sh b/scripts/deploy_langflow_gcp_spot.sh
new file mode 100644
index 000000000..065b6013f
--- /dev/null
+++ b/scripts/deploy_langflow_gcp_spot.sh
@@ -0,0 +1,90 @@
+# Set the VM, image, and networking configuration
+VM_NAME="langflow-dev"
+IMAGE_FAMILY="debian-11"
+IMAGE_PROJECT="debian-cloud"
+BOOT_DISK_SIZE="100GB"
+ZONE="us-central1-a"
+REGION="us-central1"
+VPC_NAME="default"
+SUBNET_NAME="default"
+SUBNET_RANGE="10.128.0.0/20"
+NAT_GATEWAY_NAME="nat-gateway"
+CLOUD_ROUTER_NAME="nat-client"
+
+# Set the GCP project's compute region
+gcloud config set compute/region $REGION
+
+# Check if the VPC exists, and create it if not
+vpc_exists=$(gcloud compute networks list --filter="name=$VPC_NAME" --format="value(name)")
+if [[ -z "$vpc_exists" ]]; then
+ gcloud compute networks create $VPC_NAME --subnet-mode=custom
+fi
+
+# Check if the subnet exists, and create it if not
+subnet_exists=$(gcloud compute networks subnets list --filter="name=$SUBNET_NAME AND region=$REGION" --format="value(name)")
+if [[ -z "$subnet_exists" ]]; then
+ gcloud compute networks subnets create $SUBNET_NAME --network=$VPC_NAME --region=$REGION --range=$SUBNET_RANGE
+fi
+
+# Create a firewall rule to allow TCP port 8080 for all instances in the VPC
+firewall_8080_exists=$(gcloud compute firewall-rules list --filter="name=allow-tcp-8080" --format="value(name)")
+if [[ -z "$firewall_8080_exists" ]]; then
+ gcloud compute firewall-rules create allow-tcp-8080 --network $VPC_NAME --allow tcp:8080 --source-ranges 0.0.0.0/0 --direction INGRESS
+fi
+
+# Create a firewall rule to allow IAP traffic
+firewall_iap_exists=$(gcloud compute firewall-rules list --filter="name=allow-iap" --format="value(name)")
+if [[ -z "$firewall_iap_exists" ]]; then
+ gcloud compute firewall-rules create allow-iap --network $VPC_NAME --allow tcp:80,tcp:443 --source-ranges 35.235.240.0/20 --direction INGRESS
+fi
+
+# Define the startup script as a multiline Bash here-doc
+STARTUP_SCRIPT=$(cat <<'EOF'
+#!/bin/bash
+
+# Update and upgrade the system
+apt -y update
+apt -y upgrade
+
+# Install Python 3 pip, Langflow, and Nginx
+apt -y install python3-pip
+pip install langflow
+apt-get -y install nginx
+
+# Configure Nginx for Langflow
+touch /etc/nginx/sites-available/langflow-app
+echo "server {
+ listen 0.0.0.0:8080;
+
+ location / {
+ proxy_pass http://127.0.0.1:7860;
+ proxy_set_header Host "\$host";
+ proxy_set_header X-Real-IP "\$remote_addr";
+ proxy_set_header X-Forwarded-For "\$proxy_add_x_forwarded_for";
+ }
+}" >> /etc/nginx/sites-available/langflow-app
+ln -s /etc/nginx/sites-available/langflow-app /etc/nginx/sites-enabled/
+sudo nginx -t
+sudo systemctl restart nginx
+langflow
+EOF
+)
+
+# Create a temporary file to store the startup script
+tempfile=$(mktemp)
+echo "$STARTUP_SCRIPT" > $tempfile
+
+# Create the VM instance with the specified configuration and startup script
+gcloud compute instances create $VM_NAME \
+ --image-family $IMAGE_FAMILY \
+ --image-project $IMAGE_PROJECT \
+ --boot-disk-size $BOOT_DISK_SIZE \
+ --machine-type=n1-standard-4 \
+ --metadata-from-file startup-script=$tempfile \
+ --zone $ZONE \
+ --network $VPC_NAME \
+ --subnet $SUBNET_NAME \
+  --preemptible
+
+# Remove the temporary file after the VM is created
+rm $tempfile
diff --git a/scripts/walkthroughtutorial.md b/scripts/walkthroughtutorial.md
new file mode 100644
index 000000000..fa6e3c11d
--- /dev/null
+++ b/scripts/walkthroughtutorial.md
@@ -0,0 +1,86 @@
+# Deploy Langflow on Google Cloud Platform
+
+**Duration**: 45 minutes
+**Author**: [Robert Wilkins III](https://www.linkedin.com/in/robertwilkinsiii)
+
+## Introduction
+
+In this tutorial, you will learn how to deploy Langflow on [Google Cloud Platform](https://cloud.google.com/) (GCP) using Google Cloud Shell.
+
+This tutorial assumes you have a GCP account and basic knowledge of Google Cloud Shell. If you're not familiar with Cloud Shell, you can review the [Cloud Shell documentation](https://cloud.google.com/shell/docs).
+
+## Set up your environment
+
+Before you start, make sure you have the following prerequisites:
+
+- A GCP account with the necessary permissions to create resources
+- A project on GCP where you want to deploy Langflow
+
+[**Select your GCP project**]
+
+
+
+In the next step, you'll configure the GCP environment and deploy Langflow.
+
+## Configure the GCP environment and deploy Langflow
+Run the deploy_langflow_gcp.sh script to configure the GCP environment and deploy Langflow:
+
+```sh
+gcloud config set project <PROJECT_ID>
+bash ./deploy_langflow_gcp.sh
+```
+
+The script will:
+
+1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
+2. Create a startup script to install Python, Langflow, and Nginx
+3. Create a Compute Engine VM instance with the specified configuration and startup script
+4. Configure Nginx to serve Langflow on TCP port 8080
+
+
+> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
+
+In the next step, you'll learn how to connect to the Langflow VM.
+
+## Connect to the Langflow VM
+To connect to your new Langflow VM, follow these steps:
+
+1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
+ **or**
+2. Run the following command to display the URL for your Langflow environment:
+```bash
+export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
+
+echo http://$LANGFLOW_IP:8080
+```
+
+3. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
+
+Congratulations! You have successfully deployed Langflow on Google Cloud Platform.
+
+
+
+## Cleanup
+If you want to remove the resources created during this tutorial, you can use the following commands:
+
+```bash
+gcloud compute instances delete langflow-dev --zone us-central1-a --quiet
+```
+The following network settings and services are used during this walkthrough. If you plan to continue using the project after the walkthrough, you may keep these configurations in place.
+
+However, if you decide to remove them after completing the walkthrough, you can use the following gcloud commands:
+
+
+> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
+
+```
+gcloud compute firewall-rules delete allow-tcp-8080 --quiet
+
+gcloud compute firewall-rules delete allow-iap --quiet
+
+gcloud compute networks subnets delete default --region us-central1 --quiet
+
+gcloud compute networks delete default --quiet
+```
diff --git a/scripts/walkthroughtutorial_spot.md b/scripts/walkthroughtutorial_spot.md
new file mode 100644
index 000000000..751f03d78
--- /dev/null
+++ b/scripts/walkthroughtutorial_spot.md
@@ -0,0 +1,83 @@
+# Deploy Langflow on Google Cloud Platform
+
+**Duration**: 45 minutes
+**Author**: [Robert Wilkins III](https://www.linkedin.com/in/robertwilkinsiii)
+
+## Introduction
+
+In this tutorial, you will learn how to deploy Langflow on [Google Cloud Platform](https://cloud.google.com/) (GCP) using Google Cloud Shell.
+
+This tutorial assumes you have a GCP account and basic knowledge of Google Cloud Shell. If you're not familiar with Cloud Shell, you can review the [Cloud Shell documentation](https://cloud.google.com/shell/docs).
+
+## Set up your environment
+
+Before you start, make sure you have the following prerequisites:
+
+- A GCP account with the necessary permissions to create resources
+- A project on GCP where you want to deploy Langflow
+
+[**Select your GCP project**]
+
+
+
+In the next step, you'll configure the GCP environment and deploy Langflow.
+
+## Configure the GCP environment and deploy Langflow
+Run the deploy_langflow_gcp_spot.sh script to configure the GCP environment and deploy Langflow:
+
+```sh
+gcloud config set project <PROJECT_ID>
+bash ./deploy_langflow_gcp_spot.sh
+```
+
+The script will:
+
+1. Check if the required resources (VPC, subnet, firewall rules, and Cloud Router) exist and create them if needed
+2. Create a startup script to install Python, Langflow, and Nginx
+3. Create a Compute Engine VM instance with the specified configuration and startup script
+4. Configure Nginx to serve Langflow on TCP port 8080
+
+> The process may take approximately 30 minutes to complete. Rest assured that progress is being made, and you'll be able to proceed once the process is finished.
+
+In the next step, you'll learn how to connect to the Langflow VM.
+
+## Connect to the Langflow VM
+To connect to your new Langflow VM, follow these steps:
+
+1. Navigate to the [VM instances](https://console.cloud.google.com/compute/instances) page and click on the external IP for your VM. Make sure to use HTTP and set the port to 8080
+ **or**
+2. Run the following command to display the URL for your Langflow environment:
+```bash
+export LANGFLOW_IP=$(gcloud compute instances list --filter="NAME=langflow-dev" --format="value(EXTERNAL_IP)")
+
+echo http://$LANGFLOW_IP:8080
+```
+
+3. Click on the Langflow URL in cloudshell to be greeted by the Langflow Dev environment
+
+Congratulations! You have successfully deployed Langflow on Google Cloud Platform.
+
+
+
+## Cleanup
+If you want to remove the resources created during this tutorial, you can use the following commands:
+
+```bash
+gcloud compute instances delete langflow-dev --zone us-central1-a --quiet
+```
+The following network settings and services are used during this walkthrough. If you plan to continue using the project after the walkthrough, you may keep these configurations in place.
+
+However, if you decide to remove them after completing the walkthrough, you can use the following gcloud commands:
+> These commands will delete the firewall rules and network configurations created during the walkthrough. Make sure to run them only if you no longer need these settings.
+
+```
+gcloud compute firewall-rules delete allow-tcp-8080 --quiet
+
+gcloud compute firewall-rules delete allow-iap --quiet
+
+gcloud compute networks subnets delete default --region us-central1 --quiet
+
+gcloud compute networks delete default --quiet
+```
diff --git a/src/backend/langflow/__main__.py b/src/backend/langflow/__main__.py
index ea9386d7f..1f16744ae 100644
--- a/src/backend/langflow/__main__.py
+++ b/src/backend/langflow/__main__.py
@@ -1,4 +1,3 @@
-import logging
import multiprocessing
import platform
from pathlib import Path
@@ -7,8 +6,10 @@ import typer
from fastapi.staticfiles import StaticFiles
from langflow.main import create_app
+from langflow.settings import settings
+from langflow.utils.logger import configure
-logger = logging.getLogger(__name__)
+app = typer.Typer()
def get_number_of_workers(workers=None):
@@ -17,9 +18,28 @@ def get_number_of_workers(workers=None):
return workers
+def update_settings(config: str):
+ """Update the settings from a config file."""
+ if config:
+ settings.update_from_yaml(config)
+
+
+@app.command()
def serve(
- host: str = "127.0.0.1", workers: int = 1, timeout: int = 60, port: int = 7860
+ host: str = typer.Option("127.0.0.1", help="Host to bind the server to."),
+ workers: int = typer.Option(1, help="Number of worker processes."),
+ timeout: int = typer.Option(60, help="Worker timeout in seconds."),
+ port: int = typer.Option(7860, help="Port to listen on."),
+ config: str = typer.Option("config.yaml", help="Path to the configuration file."),
+ log_level: str = typer.Option("info", help="Logging level."),
+ log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file."),
):
+ """
+ Run the Langflow server.
+ """
+
+ configure(log_level=log_level, log_file=log_file)
+ update_settings(config)
app = create_app()
# get the directory of the current file
path = Path(__file__).parent
@@ -39,10 +59,10 @@ def serve(
if platform.system() in ["Darwin", "Windows"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
- # MacOS requires a env variable to be set to use gunicorn
+ # MacOS requires an env variable to be set to use gunicorn
import uvicorn
- uvicorn.run(app, host=host, port=port, log_level="info")
+ uvicorn.run(app, host=host, port=port, log_level=log_level)
else:
from langflow.server import LangflowApplication
@@ -50,7 +70,7 @@ def serve(
def main():
- typer.run(serve)
+ app()
if __name__ == "__main__":
diff --git a/src/backend/langflow/api/base.py b/src/backend/langflow/api/base.py
new file mode 100644
index 000000000..084e04d65
--- /dev/null
+++ b/src/backend/langflow/api/base.py
@@ -0,0 +1,80 @@
+from pydantic import BaseModel, validator
+
+from langflow.graph.utils import extract_input_variables_from_prompt
+
+
+class Code(BaseModel):
+ code: str
+
+
+class Prompt(BaseModel):
+ template: str
+
+
+# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
+class CodeValidationResponse(BaseModel):
+ imports: dict
+ function: dict
+
+ @validator("imports")
+ def validate_imports(cls, v):
+ return v or {"errors": []}
+
+ @validator("function")
+ def validate_function(cls, v):
+ return v or {"errors": []}
+
+
+class PromptValidationResponse(BaseModel):
+ input_variables: list
+
+
+INVALID_CHARACTERS = {
+ " ",
+ ",",
+ ".",
+ ":",
+ ";",
+ "!",
+ "?",
+ "/",
+ "\\",
+ "(",
+ ")",
+ "[",
+ "]",
+ "{",
+ "}",
+}
+
+
+def validate_prompt(template: str):
+ input_variables = extract_input_variables_from_prompt(template)
+
+ # Check if there are invalid characters in the input_variables
+ input_variables = check_input_variables(input_variables)
+
+ return PromptValidationResponse(input_variables=input_variables)
+
+
+def check_input_variables(input_variables: list):
+ invalid_chars = []
+ fixed_variables = []
+ for variable in input_variables:
+ new_var = variable
+ for char in INVALID_CHARACTERS:
+ if char in variable:
+ invalid_chars.append(char)
+ new_var = new_var.replace(char, "")
+ fixed_variables.append(new_var)
+ if new_var != variable:
+ input_variables.remove(variable)
+ input_variables.append(new_var)
+ # If any of the input_variables is not in the fixed_variables, then it means that
+ # there are invalid characters in the input_variables
+ if any(var not in fixed_variables for var in input_variables):
+ raise ValueError(
+ f"Invalid input variables: {input_variables}. Please, use something like {fixed_variables} instead."
+ )
+
+ return input_variables
diff --git a/src/backend/langflow/api/endpoints.py b/src/backend/langflow/api/endpoints.py
index 7214ec6a6..b8290e691 100644
--- a/src/backend/langflow/api/endpoints.py
+++ b/src/backend/langflow/api/endpoints.py
@@ -1,12 +1,14 @@
+import logging
from typing import Any, Dict
from fastapi import APIRouter, HTTPException
-from langflow.interface.run import process_data_graph
+from langflow.interface.run import process_graph_cached
from langflow.interface.types import build_langchain_types_dict
# build router
router = APIRouter()
+logger = logging.getLogger(__name__)
@router.get("/all")
@@ -17,6 +19,8 @@ def get_all():
@router.post("/predict")
def get_load(data: Dict[str, Any]):
try:
- return process_data_graph(data)
+ return process_graph_cached(data)
except Exception as e:
- return HTTPException(status_code=500, detail=str(e))
+ # Log stack trace
+ logger.exception(e)
+ raise HTTPException(status_code=500, detail=str(e)) from e
diff --git a/src/backend/langflow/api/list_endpoints.py b/src/backend/langflow/api/list_endpoints.py
deleted file mode 100644
index 15946a2db..000000000
--- a/src/backend/langflow/api/list_endpoints.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from fastapi import APIRouter
-
-from langflow.interface.listing import list_type
-
-# build router
-router = APIRouter(
- prefix="/list",
- tags=["list"],
-)
-
-
-@router.get("/")
-def read_items():
- """List all components"""
- return [
- "chains",
- "agents",
- "prompts",
- "llms",
- "tools",
- ]
-
-
-@router.get("/chains")
-def list_chains():
- """List all chain types"""
- return list_type("chains")
-
-
-@router.get("/agents")
-def list_agents():
- """List all agent types"""
- # return list(agents.loading.AGENT_TO_CLASS.keys())
- return list_type("agents")
-
-
-@router.get("/prompts")
-def list_prompts():
- """List all prompt types"""
- return list_type("prompts")
-
-
-@router.get("/llms")
-def list_llms():
- """List all llm types"""
- return list_type("llms")
-
-
-@router.get("/memories")
-def list_memories():
- """List all memory types"""
- return list_type("memories")
-
-
-@router.get("/tools")
-def list_tools():
- """List all load tools"""
- return list_type("tools")
diff --git a/src/backend/langflow/api/signature.py b/src/backend/langflow/api/signature.py
deleted file mode 100644
index 96b654dbe..000000000
--- a/src/backend/langflow/api/signature.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from fastapi import APIRouter, HTTPException
-
-from langflow.interface.signature import get_signature
-
-# build router
-router = APIRouter(
- prefix="/signatures",
- tags=["signatures"],
-)
-
-
-@router.get("/chain")
-def get_chain(name: str):
- """Get the signature of a chain."""
- try:
- return get_signature(name, "chains")
- except ValueError as exc:
- raise HTTPException(status_code=404, detail="Chain not found") from exc
-
-
-@router.get("/agent")
-def get_agent(name: str):
- """Get the signature of an agent."""
- try:
- return get_signature(name, "agents")
- except ValueError as exc:
- raise HTTPException(status_code=404, detail="Agent not found") from exc
-
-
-@router.get("/prompt")
-def get_prompt(name: str):
- """Get the signature of a prompt."""
- try:
- return get_signature(name, "prompts")
- except ValueError as exc:
- raise HTTPException(status_code=404, detail="Prompt not found") from exc
-
-
-@router.get("/llm")
-def get_llm(name: str):
- """Get the signature of an llm."""
- try:
- return get_signature(name, "llms")
- except ValueError as exc:
- raise HTTPException(status_code=404, detail="LLM not found") from exc
-
-
-@router.get("/memory")
-def get_memory(name: str):
- """Get the signature of a memory."""
- try:
- return get_signature(name, "memories")
- except ValueError as exc:
- raise HTTPException(status_code=404, detail="Memory not found") from exc
-
-
-@router.get("/tool")
-def get_tool(name: str):
- """Get the signature of a tool."""
- try:
- return get_signature(name, "tools")
- except ValueError as exc:
- raise HTTPException(status_code=404, detail="Tool not found") from exc
diff --git a/src/backend/langflow/api/validate.py b/src/backend/langflow/api/validate.py
new file mode 100644
index 000000000..a60bcc506
--- /dev/null
+++ b/src/backend/langflow/api/validate.py
@@ -0,0 +1,35 @@
+from fastapi import APIRouter, HTTPException
+
+from langflow.api.base import (
+ Code,
+ CodeValidationResponse,
+ Prompt,
+ PromptValidationResponse,
+ validate_prompt,
+)
+from langflow.utils.logger import logger
+from langflow.utils.validate import validate_code
+
+# build router
+router = APIRouter(prefix="/validate", tags=["validate"])
+
+
+@router.post("/code", status_code=200, response_model=CodeValidationResponse)
+def post_validate_code(code: Code):
+ try:
+ errors = validate_code(code.code)
+ return CodeValidationResponse(
+ imports=errors.get("imports", {}),
+ function=errors.get("function", {}),
+ )
+ except Exception as e:
+ return HTTPException(status_code=500, detail=str(e))
+
+
+@router.post("/prompt", status_code=200, response_model=PromptValidationResponse)
+def post_validate_prompt(prompt: Prompt):
+ try:
+ return validate_prompt(prompt.template)
+ except Exception as e:
+ logger.exception(e)
+ raise HTTPException(status_code=500, detail=str(e)) from e
diff --git a/src/backend/langflow/cache/__init__.py b/src/backend/langflow/cache/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/cache/utils.py b/src/backend/langflow/cache/utils.py
new file mode 100644
index 000000000..310f3be80
--- /dev/null
+++ b/src/backend/langflow/cache/utils.py
@@ -0,0 +1,149 @@
+import base64
+import contextlib
+import functools
+import hashlib
+import json
+import os
+import tempfile
+from collections import OrderedDict
+from pathlib import Path
+
+import dill # type: ignore
+
+
+def create_cache_folder(func):
+ def wrapper(*args, **kwargs):
+ # Get the destination folder
+ cache_path = Path(tempfile.gettempdir()) / PREFIX
+
+ # Create the destination folder if it doesn't exist
+ os.makedirs(cache_path, exist_ok=True)
+
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+def memoize_dict(maxsize=128):
+ cache = OrderedDict()
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ hashed = compute_dict_hash(args[0])
+ key = (func.__name__, hashed, frozenset(kwargs.items()))
+ if key not in cache:
+ result = func(*args, **kwargs)
+ cache[key] = result
+ if len(cache) > maxsize:
+ cache.popitem(last=False)
+ else:
+ result = cache[key]
+ return result
+
+ def clear_cache():
+ cache.clear()
+
+ wrapper.clear_cache = clear_cache
+ return wrapper
+
+ return decorator
+
+
+PREFIX = "langflow_cache"
+
+
+@create_cache_folder
+def clear_old_cache_files(max_cache_size: int = 3):
+ cache_dir = Path(tempfile.gettempdir()) / PREFIX
+ cache_files = list(cache_dir.glob("*.dill"))
+
+ if len(cache_files) > max_cache_size:
+ cache_files_sorted_by_mtime = sorted(
+ cache_files, key=lambda x: x.stat().st_mtime, reverse=True
+ )
+
+ for cache_file in cache_files_sorted_by_mtime[max_cache_size:]:
+ with contextlib.suppress(OSError):
+ os.remove(cache_file)
+
+
+def compute_dict_hash(graph_data):
+ graph_data = filter_json(graph_data)
+
+ cleaned_graph_json = json.dumps(graph_data, sort_keys=True)
+ return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest()
+
+
+def filter_json(json_data):
+ filtered_data = json_data.copy()
+
+ # Remove 'viewport' and 'chatHistory' keys
+ if "viewport" in filtered_data:
+ del filtered_data["viewport"]
+ if "chatHistory" in filtered_data:
+ del filtered_data["chatHistory"]
+
+ # Filter nodes
+ if "nodes" in filtered_data:
+ for node in filtered_data["nodes"]:
+ if "position" in node:
+ del node["position"]
+ if "positionAbsolute" in node:
+ del node["positionAbsolute"]
+ if "selected" in node:
+ del node["selected"]
+ if "dragging" in node:
+ del node["dragging"]
+
+ return filtered_data
+
+
+@create_cache_folder
+def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> str:
+ """
+ Save a binary file to the specified folder.
+
+ Args:
+ content: The content of the file as a bytes object.
+ file_name: The name of the file, including its extension.
+
+ Returns:
+ The path to the saved file.
+ """
+ if not any(file_name.endswith(suffix) for suffix in accepted_types):
+ raise ValueError(f"File {file_name} is not accepted")
+
+ # Get the destination folder
+ cache_path = Path(tempfile.gettempdir()) / PREFIX
+
+ data = content.split(",")[1]
+ decoded_bytes = base64.b64decode(data)
+
+ # Create the full file path
+ file_path = os.path.join(cache_path, file_name)
+
+ # Save the binary content to the file
+ with open(file_path, "wb") as file:
+ file.write(decoded_bytes)
+
+ return file_path
+
+
+@create_cache_folder
+def save_cache(hash_val: str, chat_data, clean_old_cache_files: bool):
+ cache_path = Path(tempfile.gettempdir()) / PREFIX / f"{hash_val}.dill"
+ with cache_path.open("wb") as cache_file:
+ dill.dump(chat_data, cache_file)
+
+ if clean_old_cache_files:
+ clear_old_cache_files()
+
+
+@create_cache_folder
+def load_cache(hash_val):
+ cache_path = Path(tempfile.gettempdir()) / PREFIX / f"{hash_val}.dill"
+ if cache_path.exists():
+ with cache_path.open("rb") as cache_file:
+ return dill.load(cache_file)
+ return None
diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml
index 9b03aaeb1..9236d5996 100644
--- a/src/backend/langflow/config.yaml
+++ b/src/backend/langflow/config.yaml
@@ -1,27 +1,128 @@
chains:
- LLMChain
- LLMMathChain
- - LLMChecker
- # - ConversationChain
+ - LLMCheckerChain
+ - ConversationChain
+ - SeriesCharacterChain
+ - MidJourneyPromptChain
+ - TimeTravelGuideChain
+ - SQLDatabaseChain
agents:
- ZeroShotAgent
+ - JsonAgent
+ - CSVAgent
+ - initialize_agent
+ - VectorStoreAgent
+ - VectorStoreRouterAgent
+ - SQLAgent
prompts:
- PromptTemplate
- FewShotPromptTemplate
+ - ZeroShotPrompt
+  # Awaiting more tests
+ # - ChatPromptTemplate
+ # - SystemMessagePromptTemplate
+ # - HumanMessagePromptTemplate
llms:
- OpenAI
- - OpenAIChat
+ # - AzureOpenAI
+ - ChatOpenAI
+ - HuggingFaceHub
+ - LlamaCpp
tools:
- Search
- PAL-MATH
- Calculator
- Serper Search
+ - Tool
+ - PythonFunction
+ - JsonSpec
+ - News API
+ - TMDB API
+ - Podcast API
+ - QuerySQLDataBaseTool
+ - InfoSQLDatabaseTool
+ - ListSQLDatabaseTool
+ # - QueryCheckerTool
+ - BingSearchRun
+ - GoogleSearchRun
+ - GoogleSearchResults
+ - JsonListKeysTool
+ - JsonGetValueTool
+ - PythonREPLTool
+ - PythonAstREPLTool
+ - RequestsGetTool
+ - RequestsPostTool
+ - RequestsPatchTool
+ - RequestsPutTool
+ - RequestsDeleteTool
+ - WikipediaQueryRun
+ - WolframAlphaQueryRun
+
+wrappers:
+ - RequestsWrapper
+
+toolkits:
+ - OpenAPIToolkit
+ - JsonToolkit
+ - VectorStoreInfo
+ - VectorStoreRouterToolkit
memories:
- # - ConversationBufferMemory
+ - ConversationBufferMemory
+ - ConversationSummaryMemory
+ - ConversationKGMemory
+
+embeddings:
+ - OpenAIEmbeddings
+
+vectorstores:
+ - Chroma
+
+documentloaders:
+ - AirbyteJSONLoader
+ - CoNLLULoader
+ - CSVLoader
+ - UnstructuredEmailLoader
+ - EverNoteLoader
+ - FacebookChatLoader
+ - GutenbergLoader
+ - BSHTMLLoader
+ - UnstructuredHTMLLoader
+ # - UnstructuredImageLoader # Issue with Python 3.11 (https://github.com/Unstructured-IO/unstructured-inference/issues/83)
+ - UnstructuredMarkdownLoader
+ - PyPDFLoader
+ - UnstructuredPowerPointLoader
+ - SRTLoader
+ - TelegramChatLoader
+ - TextLoader
+ - UnstructuredWordDocumentLoader
+ - WebBaseLoader
+ - AZLyricsLoader
+ - CollegeConfidentialLoader
+ - HNLoader
+ - IFixitLoader
+ - IMSDbLoader
+ - GitbookLoader
+ - ReadTheDocsLoader
+
+textsplitters:
+ - CharacterTextSplitter
+
+utilities:
+ - BingSearchAPIWrapper
+ - GoogleSearchAPIWrapper
+ - GoogleSerperAPIWrapper
+ - SearxResults
+ - SearxSearchWrapper
+ - SerpAPIWrapper
+ - WikipediaAPIWrapper
+ - WolframAlphaAPIWrapper
+ # - ZapierNLAWrapper
+ - SQLDatabase
dev: false
diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py
index cea81ac1a..d45221be7 100644
--- a/src/backend/langflow/custom/customs.py
+++ b/src/backend/langflow/custom/customs.py
@@ -1,42 +1,23 @@
-from langchain.agents.mrkl import prompt
+from langflow.template import nodes
+
+# These should always be instantiated
+CUSTOM_NODES = {
+ "prompts": {"ZeroShotPrompt": nodes.ZeroShotPromptNode()},
+ "tools": {"PythonFunction": nodes.PythonFunctionNode(), "Tool": nodes.ToolNode()},
+ "agents": {
+ "JsonAgent": nodes.JsonAgentNode(),
+ "CSVAgent": nodes.CSVAgentNode(),
+ "initialize_agent": nodes.InitializeAgentNode(),
+ "VectorStoreAgent": nodes.VectorStoreAgentNode(),
+ "VectorStoreRouterAgent": nodes.VectorStoreRouterAgentNode(),
+ "SQLAgent": nodes.SQLAgentNode(),
+ },
+ "utilities": {
+ "SQLDatabase": nodes.SQLDatabaseNode(),
+ },
+}
-def get_custom_prompts():
- """Get custom prompts."""
-
- return {
- "ZeroShotPrompt": {
- "template": {
- "_type": "zero_shot",
- "prefix": {
- "type": "str",
- "required": False,
- "placeholder": "",
- "list": False,
- "show": True,
- "multiline": True,
- "value": prompt.PREFIX,
- },
- "suffix": {
- "type": "str",
- "required": True,
- "placeholder": "",
- "list": False,
- "show": True,
- "multiline": True,
- "value": prompt.SUFFIX,
- },
- "format_instructions": {
- "type": "str",
- "required": False,
- "placeholder": "",
- "list": False,
- "show": True,
- "multiline": True,
- "value": prompt.FORMAT_INSTRUCTIONS,
- },
- },
- "description": "Prompt template for Zero Shot Agent.",
- "base_classes": ["BasePromptTemplate"],
- }
- }
+def get_custom_nodes(node_type: str):
+ """Get custom nodes."""
+ return CUSTOM_NODES.get(node_type, {})
diff --git a/src/backend/langflow/graph/__init__.py b/src/backend/langflow/graph/__init__.py
new file mode 100644
index 000000000..097b7a695
--- /dev/null
+++ b/src/backend/langflow/graph/__init__.py
@@ -0,0 +1,4 @@
+from langflow.graph.base import Edge, Node
+from langflow.graph.graph import Graph
+
+__all__ = ["Graph", "Node", "Edge"]
diff --git a/src/backend/langflow/graph/base.py b/src/backend/langflow/graph/base.py
new file mode 100644
index 000000000..6d998eed6
--- /dev/null
+++ b/src/backend/langflow/graph/base.py
@@ -0,0 +1,270 @@
+# Description: Graph class for building a graph of nodes and edges
+# Insights:
+# - Defer prompts building to the last moment or when they have all the tools
+# - Build each inner agent first, then build the outer agent
+
+import contextlib
+import types
+import warnings
+from copy import deepcopy
+from typing import Any, Dict, List, Optional
+
+from langflow.cache import utils as cache_utils
+from langflow.graph.constants import DIRECT_TYPES
+from langflow.interface import loading
+from langflow.interface.listing import ALL_TYPES_DICT
+from langflow.utils.logger import logger
+
+
+class Node:
+ def __init__(self, data: Dict, base_type: Optional[str] = None) -> None:
+ self.id: str = data["id"]
+ self._data = data
+ self.edges: List[Edge] = []
+ self.base_type: Optional[str] = base_type
+ self._parse_data()
+ self._built_object = None
+ self._built = False
+
+ def _parse_data(self) -> None:
+ self.data = self._data["data"]
+ self.output = self.data["node"]["base_classes"]
+ template_dicts = {
+ key: value
+ for key, value in self.data["node"]["template"].items()
+ if isinstance(value, dict)
+ }
+
+ self.required_inputs = [
+ template_dicts[key]["type"]
+ for key, value in template_dicts.items()
+ if value["required"]
+ ]
+ self.optional_inputs = [
+ template_dicts[key]["type"]
+ for key, value in template_dicts.items()
+ if not value["required"]
+ ]
+
+ template_dict = self.data["node"]["template"]
+ self.node_type = (
+ self.data["type"] if "Tool" not in self.output else template_dict["_type"]
+ )
+ if self.base_type is None:
+ for base_type, value in ALL_TYPES_DICT.items():
+ if self.node_type in value:
+ self.base_type = base_type
+ break
+
+ def _build_params(self):
+ # Some params are required, some are optional
+ # but most importantly, some params are python base classes
+ # like str and others are LangChain objects like LLMChain, BasePromptTemplate
+ # so we need to be able to distinguish between the two
+
+ # The dicts with "type" == "str" are the ones that are python base classes
+ # and most likely have a "value" key
+
+ # So for each key besides "_type" in the template dict, we have a dict
+ # with a "type" key. If the type is not "str", then we need to get the
+ # edge that connects to that node and get the Node with the required data
+ # and use that as the value for the param
+ # If the type is "str", then we need to get the value of the "value" key
+ # and use that as the value for the param
+ template_dict = {
+ key: value
+ for key, value in self.data["node"]["template"].items()
+ if isinstance(value, dict)
+ }
+ params = {}
+ for key, value in template_dict.items():
+ if key == "_type":
+ continue
+ # If the type is not transformable to a python base class
+ # then we need to get the edge that connects to this node
+ if value.get("type") == "file":
+ # Load the type in value.get('suffixes') using
+ # what is inside value.get('content')
+ # value.get('value') is the file name
+ file_name = value.get("value")
+ content = value.get("content")
+ type_to_load = value.get("suffixes")
+ file_path = cache_utils.save_binary_file(
+ content=content, file_name=file_name, accepted_types=type_to_load
+ )
+
+ params[key] = file_path
+
+ elif value.get("type") not in DIRECT_TYPES:
+ # Get the edge that connects to this node
+ edges = [
+ edge
+ for edge in self.edges
+ if edge.target == self and edge.matched_type in value["type"]
+ ]
+
+ # Get the output of the node that the edge connects to
+ # if the value['list'] is True, then there will be more
+                # than one source to assign to params[key]
+ # so we need to append to a list if it exists
+ # or create a new list if it doesn't
+
+ if value["required"] and not edges:
+ # If a required parameter is not found, raise an error
+ raise ValueError(
+ f"Required input {key} for module {self.node_type} not found"
+ )
+ elif value["list"]:
+ # If this is a list parameter, append all sources to a list
+ params[key] = [edge.source for edge in edges]
+ elif edges:
+ # If a single parameter is found, use its source
+ params[key] = edges[0].source
+
+ elif value["required"] or value.get("value"):
+                # `value["required"]` alone is not enough: a required field
+                # may still lack a "value" entry, which would raise a KeyError,
+                # so read it with .get() instead of indexing
+ new_value = value.get("value")
+ if new_value is None:
+ warnings.warn(f"Value for {key} in {self.node_type} is None. ")
+ if value.get("type") == "int":
+ with contextlib.suppress(TypeError, ValueError):
+ new_value = int(new_value) # type: ignore
+ params[key] = new_value
+
+ # Add _type to params
+ self.params = params
+
+ def _build(self):
+ # The params dict is used to build the module
+ # it contains values and keys that point to nodes which
+ # have their own params dict
+ # When build is called, we iterate through the params dict
+ # and if the value is a node, we call build on that node
+ # and use the output of that build as the value for the param
+ # if the value is not a node, then we use the value as the param
+ # and continue
+ # Another aspect is that the node_type is the class that we need to import
+ # and instantiate with these built params
+ logger.debug(f"Building {self.node_type}")
+ # Build each node in the params dict
+ for key, value in self.params.copy().items():
+ # Check if Node or list of Nodes and not self
+ # to avoid recursion
+ if isinstance(value, Node):
+ if value == self:
+ del self.params[key]
+ continue
+ result = value.build()
+ # If the key is "func", then we need to use the run method
+ if key == "func" and not isinstance(result, types.FunctionType):
+ # func can be PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
+ # so we need to check if there is an attribute called run
+ if hasattr(result, "run"):
+ result = result.run # type: ignore
+ elif hasattr(result, "get_function"):
+ result = result.get_function() # type: ignore
+
+ self.params[key] = result
+ elif isinstance(value, list) and all(
+ isinstance(node, Node) for node in value
+ ):
+ self.params[key] = [node.build() for node in value] # type: ignore
+
+ # Get the class from LANGCHAIN_TYPES_DICT
+ # and instantiate it with the params
+ # and return the instance
+
+ try:
+ self._built_object = loading.instantiate_class(
+ node_type=self.node_type,
+ base_type=self.base_type,
+ params=self.params,
+ )
+ except Exception as exc:
+ raise ValueError(
+ f"Error building node {self.node_type}: {str(exc)}"
+ ) from exc
+
+ if self._built_object is None:
+ raise ValueError(f"Node type {self.node_type} not found")
+
+ self._built = True
+
+ def build(self, force: bool = False) -> Any:
+ if not self._built or force:
+ self._build()
+
+ #! Deepcopy is breaking for vectorstores
+ if self.base_type in [
+ "vectorstores",
+ "VectorStoreRouterAgent",
+ "VectorStoreAgent",
+ "VectorStoreInfo",
+ ] or self.node_type in [
+ "VectorStoreInfo",
+ "VectorStoreRouterToolkit",
+ "SQLDatabase",
+ ]:
+ return self._built_object
+ return deepcopy(self._built_object)
+
+ def add_edge(self, edge: "Edge") -> None:
+ self.edges.append(edge)
+
+ def __repr__(self) -> str:
+ return f"Node(id={self.id}, data={self.data})"
+
+ def __eq__(self, __o: object) -> bool:
+ return self.id == __o.id if isinstance(__o, Node) else False
+
+ def __hash__(self) -> int:
+ return id(self)
+
+
+class Edge:
+ def __init__(self, source: "Node", target: "Node"):
+ self.source: "Node" = source
+ self.target: "Node" = target
+ self.validate_edge()
+
+ def validate_edge(self) -> None:
+ # Validate that the outputs of the source node are valid inputs
+ # for the target node
+ self.source_types = self.source.output
+ self.target_reqs = self.target.required_inputs + self.target.optional_inputs
+ # Both lists contain strings and sometimes a string contains the value we are
+        # looking for e.g. source_types=["Chain"] and target_reqs=["LLMChain"]
+ # so we need to check if any of the strings in source_types is in target_reqs
+ self.valid = any(
+ output in target_req
+ for output in self.source_types
+ for target_req in self.target_reqs
+ )
+ # Get what type of input the target node is expecting
+
+ self.matched_type = next(
+ (
+ output
+ for output in self.source_types
+ for target_req in self.target_reqs
+ if output in target_req
+ ),
+ None,
+ )
+ no_matched_type = self.matched_type is None
+ if no_matched_type:
+ logger.debug(self.source_types)
+ logger.debug(self.target_reqs)
+ if no_matched_type:
+ raise ValueError(
+ f"Edge between {self.source.node_type} and {self.target.node_type} "
+ f"has no matched type"
+ )
+
+ def __repr__(self) -> str:
+ return (
+ f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
+ f", matched_type={self.matched_type})"
+ )
diff --git a/src/backend/langflow/graph/constants.py b/src/backend/langflow/graph/constants.py
new file mode 100644
index 000000000..8372e13a7
--- /dev/null
+++ b/src/backend/langflow/graph/constants.py
@@ -0,0 +1 @@
+DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"]
diff --git a/src/backend/langflow/graph/graph.py b/src/backend/langflow/graph/graph.py
new file mode 100644
index 000000000..b289d5c31
--- /dev/null
+++ b/src/backend/langflow/graph/graph.py
@@ -0,0 +1,166 @@
+from typing import Dict, List, Type, Union
+
+from langflow.graph.base import Edge, Node
+from langflow.graph.nodes import (
+ AgentNode,
+ ChainNode,
+ DocumentLoaderNode,
+ EmbeddingNode,
+ FileToolNode,
+ LLMNode,
+ MemoryNode,
+ PromptNode,
+ TextSplitterNode,
+ ToolkitNode,
+ ToolNode,
+ VectorStoreNode,
+ WrapperNode,
+)
+from langflow.interface.agents.base import agent_creator
+from langflow.interface.chains.base import chain_creator
+from langflow.interface.document_loaders.base import documentloader_creator
+from langflow.interface.embeddings.base import embedding_creator
+from langflow.interface.llms.base import llm_creator
+from langflow.interface.memories.base import memory_creator
+from langflow.interface.prompts.base import prompt_creator
+from langflow.interface.text_splitters.base import textsplitter_creator
+from langflow.interface.toolkits.base import toolkits_creator
+from langflow.interface.tools.base import tool_creator
+from langflow.interface.tools.constants import FILE_TOOLS
+from langflow.interface.vector_store.base import vectorstore_creator
+from langflow.interface.wrappers.base import wrapper_creator
+from langflow.utils import payload
+
+
+class Graph:
+ def __init__(
+ self,
+ nodes: List[Dict[str, Union[str, Dict[str, Union[str, List[str]]]]]],
+ edges: List[Dict[str, str]],
+ ) -> None:
+ self._nodes = nodes
+ self._edges = edges
+ self._build_graph()
+
+ def _build_graph(self) -> None:
+ self.nodes = self._build_nodes()
+ self.edges = self._build_edges()
+ for edge in self.edges:
+ edge.source.add_edge(edge)
+ edge.target.add_edge(edge)
+
+ # This is a hack to make sure that the LLM node is sent to
+ # the toolkit node
+ llm_node = None
+ for node in self.nodes:
+ node._build_params()
+
+ if isinstance(node, LLMNode):
+ llm_node = node
+
+ for node in self.nodes:
+ if isinstance(node, ToolkitNode):
+ node.params["llm"] = llm_node
+ # remove invalid nodes
+ self.nodes = [
+ node
+ for node in self.nodes
+ if self._validate_node(node)
+ or (len(self.nodes) == 1 and len(self.edges) == 0)
+ ]
+
+ def _validate_node(self, node: Node) -> bool:
+ # All nodes that do not have edges are invalid
+ return len(node.edges) > 0
+
+ def get_node(self, node_id: str) -> Union[None, Node]:
+ return next((node for node in self.nodes if node.id == node_id), None)
+
+ def get_nodes_with_target(self, node: Node) -> List[Node]:
+ connected_nodes: List[Node] = [
+ edge.source for edge in self.edges if edge.target == node
+ ]
+ return connected_nodes
+
+ def build(self) -> List[Node]:
+ # Get root node
+ root_node = payload.get_root_node(self)
+ if root_node is None:
+ raise ValueError("No root node found")
+ return root_node.build()
+
+ def get_node_neighbors(self, node: Node) -> Dict[Node, int]:
+ neighbors: Dict[Node, int] = {}
+ for edge in self.edges:
+ if edge.source == node:
+ neighbor = edge.target
+ if neighbor not in neighbors:
+ neighbors[neighbor] = 0
+ neighbors[neighbor] += 1
+ elif edge.target == node:
+ neighbor = edge.source
+ if neighbor not in neighbors:
+ neighbors[neighbor] = 0
+ neighbors[neighbor] += 1
+ return neighbors
+
+ def _build_edges(self) -> List[Edge]:
+ # Edge takes two nodes as arguments, so we need to build the nodes first
+ # and then build the edges
+ # if we can't find a node, we raise an error
+
+ edges: List[Edge] = []
+ for edge in self._edges:
+ source = self.get_node(edge["source"])
+ target = self.get_node(edge["target"])
+ if source is None:
+ raise ValueError(f"Source node {edge['source']} not found")
+ if target is None:
+ raise ValueError(f"Target node {edge['target']} not found")
+ edges.append(Edge(source, target))
+ return edges
+
+ def _get_node_class(self, node_type: str, node_lc_type: str) -> Type[Node]:
+ node_type_map: Dict[str, Type[Node]] = {
+ **{t: PromptNode for t in prompt_creator.to_list()},
+ **{t: AgentNode for t in agent_creator.to_list()},
+ **{t: ChainNode for t in chain_creator.to_list()},
+ **{t: ToolNode for t in tool_creator.to_list()},
+ **{t: ToolkitNode for t in toolkits_creator.to_list()},
+ **{t: WrapperNode for t in wrapper_creator.to_list()},
+ **{t: LLMNode for t in llm_creator.to_list()},
+ **{t: MemoryNode for t in memory_creator.to_list()},
+ **{t: EmbeddingNode for t in embedding_creator.to_list()},
+ **{t: VectorStoreNode for t in vectorstore_creator.to_list()},
+ **{t: DocumentLoaderNode for t in documentloader_creator.to_list()},
+ **{t: TextSplitterNode for t in textsplitter_creator.to_list()},
+ }
+
+ if node_type in FILE_TOOLS:
+ return FileToolNode
+ if node_type in node_type_map:
+ return node_type_map[node_type]
+ if node_lc_type in node_type_map:
+ return node_type_map[node_lc_type]
+ return Node
+
+ def _build_nodes(self) -> List[Node]:
+ nodes: List[Node] = []
+ for node in self._nodes:
+ node_data = node["data"]
+ node_type: str = node_data["type"] # type: ignore
+ node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore
+
+ NodeClass = self._get_node_class(node_type, node_lc_type)
+ nodes.append(NodeClass(node))
+
+ return nodes
+
+ def get_children_by_node_type(self, node: Node, node_type: str) -> List[Node]:
+ children = []
+ node_types = [node.data["type"]]
+ if "node" in node.data:
+ node_types += node.data["node"]["base_classes"]
+ if node_type in node_types:
+ children.append(node)
+ return children
diff --git a/src/backend/langflow/graph/nodes.py b/src/backend/langflow/graph/nodes.py
new file mode 100644
index 000000000..018174334
--- /dev/null
+++ b/src/backend/langflow/graph/nodes.py
@@ -0,0 +1,160 @@
+from copy import deepcopy
+from typing import Any, Dict, List, Optional, Union
+
+from langflow.graph.base import Node
+from langflow.graph.utils import extract_input_variables_from_prompt
+
+
+class AgentNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="agents")
+
+ self.tools: List[ToolNode] = []
+ self.chains: List[ChainNode] = []
+
+ def _set_tools_and_chains(self) -> None:
+ for edge in self.edges:
+ source_node = edge.source
+ if isinstance(source_node, ToolNode):
+ self.tools.append(source_node)
+ elif isinstance(source_node, ChainNode):
+ self.chains.append(source_node)
+
+ def build(self, force: bool = False) -> Any:
+ if not self._built or force:
+ self._set_tools_and_chains()
+ # First, build the tools
+ for tool_node in self.tools:
+ tool_node.build()
+
+ # Next, build the chains and the rest
+ for chain_node in self.chains:
+ chain_node.build(tools=self.tools)
+
+ self._build()
+
+ #! Cannot deepcopy VectorStore, VectorStoreRouter, or SQL agents
+ if self.node_type in ["VectorStoreAgent", "VectorStoreRouterAgent", "SQLAgent"]:
+ return self._built_object
+ return deepcopy(self._built_object)
+
+
+class ToolNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="tools")
+
+
+class PromptNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="prompts")
+
+ def build(
+ self,
+ force: bool = False,
+ tools: Optional[Union[List[Node], List[ToolNode]]] = None,
+ ) -> Any:
+ if not self._built or force:
+ if (
+ "input_variables" not in self.params
+ or self.params["input_variables"] is None
+ ):
+ self.params["input_variables"] = []
+ # Check if it is a ZeroShotPrompt and needs a tool
+ if "ShotPrompt" in self.node_type:
+ tools = (
+ [tool_node.build() for tool_node in tools]
+ if tools is not None
+ else []
+ )
+ self.params["tools"] = tools
+ prompt_params = [
+ key
+ for key, value in self.params.items()
+ if isinstance(value, str) and key != "format_instructions"
+ ]
+ else:
+ prompt_params = ["template"]
+ for param in prompt_params:
+ prompt_text = self.params[param]
+ variables = extract_input_variables_from_prompt(prompt_text)
+ self.params["input_variables"].extend(variables)
+ self.params["input_variables"] = list(set(self.params["input_variables"]))
+
+ self._build()
+ return deepcopy(self._built_object)
+
+
+class ChainNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="chains")
+
+ def build(
+ self,
+ force: bool = False,
+ tools: Optional[Union[List[Node], List[ToolNode]]] = None,
+ ) -> Any:
+ if not self._built or force:
+ # Check if the chain requires a PromptNode
+ for key, value in self.params.items():
+ if isinstance(value, PromptNode):
+ # Build the PromptNode, passing the tools if available
+ self.params[key] = value.build(tools=tools, force=force)
+
+ self._build()
+
+ #! Cannot deepcopy SQLDatabaseChain
+ if self.node_type in ["SQLDatabaseChain"]:
+ return self._built_object
+ return deepcopy(self._built_object)
+
+
+class LLMNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="llms")
+
+
+class ToolkitNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="toolkits")
+
+
+class FileToolNode(ToolNode):
+ def __init__(self, data: Dict):
+ super().__init__(data)
+
+
+class WrapperNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="wrappers")
+
+ def build(self, force: bool = False) -> Any:
+ if not self._built or force:
+ if "headers" in self.params:
+ self.params["headers"] = eval(self.params["headers"])
+ self._build()
+ return deepcopy(self._built_object)
+
+
+class DocumentLoaderNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="documentloaders")
+
+
+class EmbeddingNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="embeddings")
+
+
+class VectorStoreNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="vectorstores")
+
+
+class MemoryNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="memory")
+
+
+class TextSplitterNode(Node):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="textsplitters")
diff --git a/src/backend/langflow/graph/utils.py b/src/backend/langflow/graph/utils.py
new file mode 100644
index 000000000..6d56e933e
--- /dev/null
+++ b/src/backend/langflow/graph/utils.py
@@ -0,0 +1,19 @@
+import re
+
+
+def validate_prompt(prompt: str):
+ """Validate prompt."""
+ if extract_input_variables_from_prompt(prompt):
+ return prompt
+
+ return fix_prompt(prompt)
+
+
+def fix_prompt(prompt: str):
+ """Fix prompt."""
+ return prompt + " {input}"
+
+
+def extract_input_variables_from_prompt(prompt: str) -> list[str]:
+ """Extract input variables from prompt."""
+ return re.findall(r"{(.*?)}", prompt)
diff --git a/src/backend/langflow/interface/agents/__init__.py b/src/backend/langflow/interface/agents/__init__.py
new file mode 100644
index 000000000..df15bc39b
--- /dev/null
+++ b/src/backend/langflow/interface/agents/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.agents.base import AgentCreator
+
+__all__ = ["AgentCreator"]
diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/langflow/interface/agents/base.py
new file mode 100644
index 000000000..1d23b481e
--- /dev/null
+++ b/src/backend/langflow/interface/agents/base.py
@@ -0,0 +1,53 @@
+from typing import Dict, List, Optional
+
+from langchain.agents import loading
+
+from langflow.custom.customs import get_custom_nodes
+from langflow.interface.agents.custom import CUSTOM_AGENTS
+from langflow.interface.base import LangChainTypeCreator
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class AgentCreator(LangChainTypeCreator):
+ type_name: str = "agents"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = loading.AGENT_TO_CLASS
+ # Add JsonAgent to the list of agents
+ for name, agent in CUSTOM_AGENTS.items():
+ # TODO: validate AgentType
+ self.type_dict[name] = agent # type: ignore
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ try:
+ if name in get_custom_nodes(self.type_name).keys():
+ return get_custom_nodes(self.type_name)[name]
+ return build_template_from_class(
+ name, self.type_to_loader_dict, add_function=True
+ )
+ except ValueError as exc:
+ raise ValueError("Agent not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Agent {name} not loaded: {exc}")
+ return None
+
+    # Returns the agent names enabled in settings (all of them in dev mode)
+ def to_list(self) -> List[str]:
+ names = []
+ for _, agent in self.type_to_loader_dict.items():
+ agent_name = (
+ agent.function_name()
+ if hasattr(agent, "function_name")
+ else agent.__name__
+ )
+ if agent_name in settings.agents or settings.dev:
+ names.append(agent_name)
+ return names
+
+
+agent_creator = AgentCreator()
diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py
new file mode 100644
index 000000000..27159d22a
--- /dev/null
+++ b/src/backend/langflow/interface/agents/custom.py
@@ -0,0 +1,306 @@
+from typing import Any, List, Optional
+
+from langchain import LLMChain
+from langchain.agents import (
+ AgentExecutor,
+ Tool,
+ ZeroShotAgent,
+ initialize_agent,
+)
+from langchain.agents.agent_toolkits import (
+ SQLDatabaseToolkit,
+ VectorStoreInfo,
+ VectorStoreRouterToolkit,
+ VectorStoreToolkit,
+)
+from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
+from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
+from langchain.agents.agent_toolkits.pandas.prompt import PREFIX as PANDAS_PREFIX
+from langchain.agents.agent_toolkits.pandas.prompt import SUFFIX as PANDAS_SUFFIX
+from langchain.agents.agent_toolkits.sql.prompt import SQL_PREFIX, SQL_SUFFIX
+from langchain.agents.agent_toolkits.vectorstore.prompt import (
+ PREFIX as VECTORSTORE_PREFIX,
+)
+from langchain.agents.agent_toolkits.vectorstore.prompt import (
+ ROUTER_PREFIX as VECTORSTORE_ROUTER_PREFIX,
+)
+from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
+from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS as SQL_FORMAT_INSTRUCTIONS
+from langchain.llms.base import BaseLLM
+from langchain.memory.chat_memory import BaseChatMemory
+from langchain.schema import BaseLanguageModel
+from langchain.sql_database import SQLDatabase
+from langchain.tools.python.tool import PythonAstREPLTool
+from langchain.tools.sql_database.prompt import QUERY_CHECKER
+
+
+class JsonAgent(AgentExecutor):
+ """Json agent"""
+
+ @staticmethod
+ def function_name():
+ return "JsonAgent"
+
+ @classmethod
+ def initialize(cls, *args, **kwargs):
+ return cls.from_toolkit_and_llm(*args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
+ tools = toolkit.get_tools()
+ tool_names = [tool.name for tool in tools]
+ prompt = ZeroShotAgent.create_prompt(
+ tools,
+ prefix=JSON_PREFIX,
+ suffix=JSON_SUFFIX,
+ format_instructions=FORMAT_INSTRUCTIONS,
+ input_variables=None,
+ )
+ llm_chain = LLMChain(
+ llm=llm,
+ prompt=prompt,
+ )
+ agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
+ return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+class CSVAgent(AgentExecutor):
+ """CSV agent"""
+
+ @staticmethod
+ def function_name():
+ return "CSVAgent"
+
+ @classmethod
+ def initialize(cls, *args, **kwargs):
+ return cls.from_toolkit_and_llm(*args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def from_toolkit_and_llm(
+ cls,
+ path: str,
+ llm: BaseLanguageModel,
+ pandas_kwargs: Optional[dict] = None,
+ **kwargs: Any
+ ):
+ import pandas as pd # type: ignore
+
+ _kwargs = pandas_kwargs or {}
+ df = pd.read_csv(path, **_kwargs)
+
+ tools = [PythonAstREPLTool(locals={"df": df})] # type: ignore
+ prompt = ZeroShotAgent.create_prompt(
+ tools,
+ prefix=PANDAS_PREFIX,
+ suffix=PANDAS_SUFFIX,
+ input_variables=["df", "input", "agent_scratchpad"],
+ )
+ partial_prompt = prompt.partial(df=str(df.head()))
+ llm_chain = LLMChain(
+ llm=llm,
+ prompt=partial_prompt,
+ )
+ tool_names = [tool.name for tool in tools]
+ agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+
+ return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+class VectorStoreAgent(AgentExecutor):
+ """Vector Store agent"""
+
+ @staticmethod
+ def function_name():
+ return "VectorStoreAgent"
+
+ @classmethod
+ def initialize(cls, *args, **kwargs):
+ return cls.from_toolkit_and_llm(*args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def from_toolkit_and_llm(
+ cls, llm: BaseLLM, vectorstoreinfo: VectorStoreInfo, **kwargs: Any
+ ):
+ """Construct a vectorstore agent from an LLM and tools."""
+
+ toolkit = VectorStoreToolkit(vectorstore_info=vectorstoreinfo, llm=llm)
+
+ tools = toolkit.get_tools()
+ prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_PREFIX)
+ llm_chain = LLMChain(
+ llm=llm,
+ prompt=prompt,
+ )
+ tool_names = [tool.name for tool in tools]
+ agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+ return AgentExecutor.from_agent_and_tools(
+ agent=agent, tools=tools, verbose=True
+ )
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+class SQLAgent(AgentExecutor):
+ """SQL agent"""
+
+ @staticmethod
+ def function_name():
+ return "SQLAgent"
+
+ @classmethod
+ def initialize(cls, *args, **kwargs):
+ return cls.from_toolkit_and_llm(*args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def from_toolkit_and_llm(cls, llm: BaseLLM, database_uri: str, **kwargs: Any):
+ """Construct a sql agent from an LLM and tools."""
+ db = SQLDatabase.from_uri(database_uri)
+ toolkit = SQLDatabaseToolkit(db=db)
+
+ # The right code should be this, but there is a problem with tools = toolkit.get_tools()
+ # related to `OPENAI_API_KEY`
+ # return create_sql_agent(llm=llm, toolkit=toolkit, verbose=True)
+ from langchain.prompts import PromptTemplate
+ from langchain.tools.sql_database.tool import (
+ InfoSQLDatabaseTool,
+ ListSQLDatabaseTool,
+ QueryCheckerTool,
+ QuerySQLDataBaseTool,
+ )
+
+ llmchain = LLMChain(
+ llm=llm,
+ prompt=PromptTemplate(
+ template=QUERY_CHECKER, input_variables=["query", "dialect"]
+ ),
+ )
+
+ tools = [
+ QuerySQLDataBaseTool(db=db), # type: ignore
+ InfoSQLDatabaseTool(db=db), # type: ignore
+ ListSQLDatabaseTool(db=db), # type: ignore
+ QueryCheckerTool(db=db, llm_chain=llmchain), # type: ignore
+ ]
+
+ prefix = SQL_PREFIX.format(dialect=toolkit.dialect, top_k=10)
+ prompt = ZeroShotAgent.create_prompt(
+ tools=tools, # type: ignore
+ prefix=prefix,
+ suffix=SQL_SUFFIX,
+ format_instructions=SQL_FORMAT_INSTRUCTIONS,
+ )
+ llm_chain = LLMChain(
+ llm=llm,
+ prompt=prompt,
+ )
+ tool_names = [tool.name for tool in tools] # type: ignore
+ agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+ return AgentExecutor.from_agent_and_tools(
+ agent=agent,
+ tools=tools, # type: ignore
+ verbose=True,
+ max_iterations=15,
+ early_stopping_method="force",
+ )
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+class VectorStoreRouterAgent(AgentExecutor):
+ """Vector Store Router Agent"""
+
+ @staticmethod
+ def function_name():
+ return "VectorStoreRouterAgent"
+
+ @classmethod
+ def initialize(cls, *args, **kwargs):
+ return cls.from_toolkit_and_llm(*args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def from_toolkit_and_llm(
+ cls,
+ llm: BaseLanguageModel,
+ vectorstoreroutertoolkit: VectorStoreRouterToolkit,
+ **kwargs: Any
+ ):
+ """Construct a vector store router agent from an LLM and tools."""
+
+ tools = vectorstoreroutertoolkit.get_tools()
+ prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_ROUTER_PREFIX)
+ llm_chain = LLMChain(
+ llm=llm,
+ prompt=prompt,
+ )
+ tool_names = [tool.name for tool in tools]
+ agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names, **kwargs)
+ return AgentExecutor.from_agent_and_tools(
+ agent=agent, tools=tools, verbose=True
+ )
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+class InitializeAgent(AgentExecutor):
+ """Implementation of initialize_agent function"""
+
+ @staticmethod
+ def function_name():
+ return "initialize_agent"
+
+ @classmethod
+ def initialize(
+ cls,
+ llm: BaseLLM,
+ tools: List[Tool],
+ agent: str,
+ memory: Optional[BaseChatMemory] = None,
+ ):
+ return initialize_agent(
+ tools=tools,
+ llm=llm,
+ # LangChain now uses Enum for agent, but we still support string
+ agent=agent, # type: ignore
+ memory=memory,
+ return_intermediate_steps=True,
+ )
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+CUSTOM_AGENTS = {
+ "JsonAgent": JsonAgent,
+ "CSVAgent": CSVAgent,
+ "initialize_agent": InitializeAgent,
+ "VectorStoreAgent": VectorStoreAgent,
+ "VectorStoreRouterAgent": VectorStoreRouterAgent,
+ "SQLAgent": SQLAgent,
+}
diff --git a/src/backend/langflow/interface/agents/prebuilt.py b/src/backend/langflow/interface/agents/prebuilt.py
new file mode 100644
index 000000000..e20ec3bde
--- /dev/null
+++ b/src/backend/langflow/interface/agents/prebuilt.py
@@ -0,0 +1,45 @@
+from langchain import LLMChain
+from langchain.agents import AgentExecutor, ZeroShotAgent
+from langchain.agents.agent_toolkits.json.prompt import JSON_PREFIX, JSON_SUFFIX
+from langchain.agents.agent_toolkits.json.toolkit import JsonToolkit
+from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS
+from langchain.schema import BaseLanguageModel
+
+
+class MalfoyAgent(AgentExecutor):
+    """Malfoy agent"""
+
+ prefix = "Malfoy: "
+
+ @classmethod
+ def initialize(cls, *args, **kwargs):
+ return cls.from_toolkit_and_llm(*args, **kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @classmethod
+ def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
+ tools = toolkit.get_tools()
+ tool_names = [tool.name for tool in tools]
+ prompt = ZeroShotAgent.create_prompt(
+ tools,
+ prefix=JSON_PREFIX,
+ suffix=JSON_SUFFIX,
+ format_instructions=FORMAT_INSTRUCTIONS,
+ input_variables=None,
+ )
+ llm_chain = LLMChain(
+ llm=llm,
+ prompt=prompt,
+ )
+ agent = ZeroShotAgent(llm_chain=llm_chain, allowed_tools=tool_names)
+ return cls.from_agent_and_tools(agent=agent, tools=tools, verbose=True)
+
+ def run(self, *args, **kwargs):
+ return super().run(*args, **kwargs)
+
+
+PREBUILT_AGENTS = {
+ "MalfoyAgent": MalfoyAgent,
+}
diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py
new file mode 100644
index 000000000..663700ffd
--- /dev/null
+++ b/src/backend/langflow/interface/base.py
@@ -0,0 +1,80 @@
+from abc import ABC, abstractmethod
+from typing import Any, Dict, List, Optional, Type, Union
+
+from pydantic import BaseModel
+
+from langflow.template.base import FrontendNode, Template, TemplateField
+from langflow.utils.logger import logger
+
+# Assuming necessary imports for Field, Template, and FrontendNode classes
+
+
+class LangChainTypeCreator(BaseModel, ABC):
+ type_name: str
+ type_dict: Optional[Dict] = None
+
+ @property
+ def frontend_node_class(self) -> Type[FrontendNode]:
+ """The class type of the FrontendNode created in frontend_node."""
+ return FrontendNode
+
+ @property
+ @abstractmethod
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ raise NotImplementedError
+ return self.type_dict
+
+ @abstractmethod
+ def get_signature(self, name: str) -> Union[Optional[Dict[Any, Any]], FrontendNode]:
+ pass
+
+ @abstractmethod
+ def to_list(self) -> List[str]:
+ pass
+
+ def to_dict(self) -> Dict:
+ result: Dict = {self.type_name: {}}
+
+ for name in self.to_list():
+ # frontend_node.to_dict() returns a dict with the following structure:
+ # {name: {template: {fields}, description: str}}
+ # so we should update the result dict
+ node = self.frontend_node(name)
+ if node is not None:
+ node = node.to_dict()
+ result[self.type_name].update(node)
+
+ return result
+
+ def frontend_node(self, name) -> Union[FrontendNode, None]:
+ signature = self.get_signature(name)
+ if signature is None:
+ logger.error(f"Node {name} not loaded")
+ return None
+ if isinstance(signature, FrontendNode):
+ return signature
+ fields = [
+ TemplateField(
+ name=key,
+ field_type=value["type"],
+ required=value.get("required", False),
+ placeholder=value.get("placeholder", ""),
+ is_list=value.get("list", False),
+ show=value.get("show", True),
+ multiline=value.get("multiline", False),
+ value=value.get("value", None),
+ suffixes=value.get("suffixes", []),
+ file_types=value.get("fileTypes", []),
+ content=value.get("content", None),
+ )
+ for key, value in signature["template"].items()
+ if key != "_type"
+ ]
+ template = Template(type_name=name, fields=fields)
+ return self.frontend_node_class(
+ template=template,
+ description=signature.get("description", ""),
+ base_classes=signature["base_classes"],
+ name=name,
+ )
diff --git a/src/backend/langflow/interface/chains/__init__.py b/src/backend/langflow/interface/chains/__init__.py
new file mode 100644
index 000000000..2e5570b3c
--- /dev/null
+++ b/src/backend/langflow/interface/chains/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.chains.base import ChainCreator
+
+__all__ = ["ChainCreator"]
diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py
new file mode 100644
index 000000000..9dc8ded3f
--- /dev/null
+++ b/src/backend/langflow/interface/chains/base.py
@@ -0,0 +1,54 @@
+from typing import Dict, List, Optional, Type
+
+from langflow.custom.customs import get_custom_nodes
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import chain_type_to_cls_dict
+from langflow.settings import settings
+from langflow.template.nodes import ChainFrontendNode
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+# Assuming necessary imports for Field, Template, and FrontendNode classes
+
+
+class ChainCreator(LangChainTypeCreator):
+ type_name: str = "chains"
+
+ @property
+ def frontend_node_class(self) -> Type[ChainFrontendNode]:
+ return ChainFrontendNode
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = chain_type_to_cls_dict
+ from langflow.interface.chains.custom import CUSTOM_CHAINS
+
+ self.type_dict.update(CUSTOM_CHAINS)
+ # Filter according to settings.chains
+ self.type_dict = {
+ name: chain
+ for name, chain in self.type_dict.items()
+ if name in settings.chains or settings.dev
+ }
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ try:
+ if name in get_custom_nodes(self.type_name).keys():
+ return get_custom_nodes(self.type_name)[name]
+ return build_template_from_class(name, self.type_to_loader_dict)
+ except ValueError as exc:
+ raise ValueError("Chain not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Chain {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ custom_chains = list(get_custom_nodes("chains").keys())
+ default_chains = list(self.type_to_loader_dict.keys())
+
+ return default_chains + custom_chains
+
+
+chain_creator = ChainCreator()
diff --git a/src/backend/langflow/interface/chains/custom.py b/src/backend/langflow/interface/chains/custom.py
new file mode 100644
index 000000000..cb76a53c8
--- /dev/null
+++ b/src/backend/langflow/interface/chains/custom.py
@@ -0,0 +1,101 @@
+from typing import Dict, Optional, Type
+
+from langchain.chains import ConversationChain
+from langchain.memory.buffer import ConversationBufferMemory
+from langchain.schema import BaseMemory
+from pydantic import Field, root_validator
+
+from langflow.graph.utils import extract_input_variables_from_prompt
+
+DEFAULT_SUFFIX = """
+Current conversation:
+{history}
+Human: {input}
+{ai_prefix}"""
+
+
+class BaseCustomChain(ConversationChain):
+ """BaseCustomChain is a chain you can use to have a conversation with a custom character."""
+
+ template: Optional[str]
+
+ ai_prefix_value: Optional[str]
+ """Field to use as the ai_prefix. It needs to be set and has to be in the template"""
+
+ @root_validator(pre=False)
+ def build_template(cls, values):
+ format_dict = {}
+ input_variables = extract_input_variables_from_prompt(values["template"])
+
+ if values.get("ai_prefix_value", None) is None:
+ values["ai_prefix_value"] = values["memory"].ai_prefix
+
+ for key in input_variables:
+ new_value = values.get(key, f"{{{key}}}")
+ format_dict[key] = new_value
+ if key == values.get("ai_prefix_value", None):
+ values["memory"].ai_prefix = new_value
+
+ values["template"] = values["template"].format(**format_dict)
+
+ values["template"] = values["template"]
+ values["input_variables"] = extract_input_variables_from_prompt(
+ values["template"]
+ )
+ values["prompt"].template = values["template"]
+ values["prompt"].input_variables = values["input_variables"]
+ return values
+
+
+class SeriesCharacterChain(BaseCustomChain):
+ """SeriesCharacterChain is a chain you can use to have a conversation with a character from a series."""
+
+ character: str
+ series: str
+ template: Optional[
+ str
+ ] = """I want you to act like {character} from {series}.
+I want you to respond and answer like {character}. do not write any explanations. only answer like {character}.
+You must know all of the knowledge of {character}.
+Current conversation:
+{history}
+Human: {input}
+{character}:"""
+ memory: BaseMemory = Field(default_factory=ConversationBufferMemory)
+ ai_prefix_value: Optional[str] = "character"
+ """Default memory store."""
+
+
+class MidJourneyPromptChain(BaseCustomChain):
+ """MidJourneyPromptChain is a chain you can use to generate new MidJourney prompts."""
+
+ template: Optional[
+ str
+ ] = """I want you to act as a prompt generator for Midjourney's artificial intelligence program.
+ Your job is to provide detailed and creative descriptions that will inspire unique and interesting images from the AI.
+ Keep in mind that the AI is capable of understanding a wide range of language and can interpret abstract concepts, so feel free to be as imaginative and descriptive as possible.
+ For example, you could describe a scene from a futuristic city, or a surreal landscape filled with strange creatures.
+ The more detailed and imaginative your description, the more interesting the resulting image will be. Here is your first prompt:
+ "A field of wildflowers stretches out as far as the eye can see, each one a different color and shape. In the distance, a massive tree towers over the landscape, its branches reaching up to the sky like tentacles.\"
+
+ Current conversation:
+ {history}
+ Human: {input}
+ AI:""" # noqa: E501
+
+
+class TimeTravelGuideChain(BaseCustomChain):
+ template: Optional[
+ str
+ ] = """I want you to act as my time travel guide. You are helpful and creative. I will provide you with the historical period or future time I want to visit and you will suggest the best events, sights, or people to experience. Provide the suggestions and any necessary information.
+ Current conversation:
+ {history}
+ Human: {input}
+ AI:""" # noqa: E501
+
+
+CUSTOM_CHAINS: Dict[str, Type[ConversationChain]] = {
+ "SeriesCharacterChain": SeriesCharacterChain,
+ "MidJourneyPromptChain": MidJourneyPromptChain,
+ "TimeTravelGuideChain": TimeTravelGuideChain,
+}
diff --git a/src/backend/langflow/interface/custom_lists.py b/src/backend/langflow/interface/custom_lists.py
index 3e8a800f9..f07b03f04 100644
--- a/src/backend/langflow/interface/custom_lists.py
+++ b/src/backend/langflow/interface/custom_lists.py
@@ -1,43 +1,86 @@
-## LLM
+import inspect
from typing import Any
-from langchain import llms
-from langchain.llms.openai import OpenAIChat
+from langchain import (
+ chains,
+ document_loaders,
+ embeddings,
+ llms,
+ memory,
+ requests,
+ text_splitter,
+ utilities,
+ vectorstores,
+)
+from langchain.agents import agent_toolkits
+from langchain.chat_models import ChatOpenAI
+from langchain.sql_database import SQLDatabase
+from langflow.interface.importing.utils import import_class
+
+## LLMs
llm_type_to_cls_dict = llms.type_to_cls_dict
-llm_type_to_cls_dict["openai-chat"] = OpenAIChat
+llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
-
-## Memory
-
-# from langchain.memory.buffer_window import ConversationBufferWindowMemory
-# from langchain.memory.chat_memory import ChatMessageHistory
-# from langchain.memory.combined import CombinedMemory
-# from langchain.memory.entity import ConversationEntityMemory
-# from langchain.memory.kg import ConversationKGMemory
-# from langchain.memory.readonly import ReadOnlySharedMemory
-# from langchain.memory.simple import SimpleMemory
-# from langchain.memory.summary import ConversationSummaryMemory
-# from langchain.memory.summary_buffer import ConversationSummaryBufferMemory
-
-memory_type_to_cls_dict: dict[str, Any] = {
- # "CombinedMemory": CombinedMemory,
- # "ConversationBufferWindowMemory": ConversationBufferWindowMemory,
- # "ConversationBufferMemory": ConversationBufferMemory,
- # "SimpleMemory": SimpleMemory,
- # "ConversationSummaryBufferMemory": ConversationSummaryBufferMemory,
- # "ConversationKGMemory": ConversationKGMemory,
- # "ConversationEntityMemory": ConversationEntityMemory,
- # "ConversationSummaryMemory": ConversationSummaryMemory,
- # "ChatMessageHistory": ChatMessageHistory,
- # "ConversationStringBufferMemory": ConversationStringBufferMemory,
- # "ReadOnlySharedMemory": ReadOnlySharedMemory,
+## Chains
+chain_type_to_cls_dict: dict[str, Any] = {
+ chain_name: import_class(f"langchain.chains.{chain_name}")
+ for chain_name in chains.__all__
}
+## Toolkits
+toolkit_type_to_loader_dict: dict[str, Any] = {
+ toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
+ # if toolkit_name is lower case it is a loader
+ for toolkit_name in agent_toolkits.__all__
+ if toolkit_name.islower()
+}
-## Chain
-# from langchain.chains.loading import type_to_loader_dict
-# from langchain.chains.conversation.base import ConversationChain
+toolkit_type_to_cls_dict: dict[str, Any] = {
+ toolkit_name: import_class(f"langchain.agents.agent_toolkits.{toolkit_name}")
+ # if toolkit_name is not lower case it is a class
+ for toolkit_name in agent_toolkits.__all__
+ if not toolkit_name.islower()
+}
-# chain_type_to_cls_dict = type_to_loader_dict
-# chain_type_to_cls_dict["conversation_chain"] = ConversationChain
+## Memories
+memory_type_to_cls_dict: dict[str, Any] = {
+ memory_name: import_class(f"langchain.memory.{memory_name}")
+ for memory_name in memory.__all__
+}
+
+## Wrappers
+wrapper_type_to_cls_dict: dict[str, Any] = {
+ wrapper.__name__: wrapper for wrapper in [requests.RequestsWrapper]
+}
+
+## Embeddings
+embedding_type_to_cls_dict: dict[str, Any] = {
+ embedding_name: import_class(f"langchain.embeddings.{embedding_name}")
+ for embedding_name in embeddings.__all__
+}
+
+## Vector Stores
+vectorstores_type_to_cls_dict: dict[str, Any] = {
+ vectorstore_name: import_class(f"langchain.vectorstores.{vectorstore_name}")
+ for vectorstore_name in vectorstores.__all__
+}
+
+## Document Loaders
+documentloaders_type_to_cls_dict: dict[str, Any] = {
+ documentloader_name: import_class(
+ f"langchain.document_loaders.{documentloader_name}"
+ )
+ for documentloader_name in document_loaders.__all__
+}
+
+## Text Splitters
+textsplitter_type_to_cls_dict: dict[str, Any] = dict(
+ inspect.getmembers(text_splitter, inspect.isclass)
+)
+
+## Utilities
+utility_type_to_cls_dict: dict[str, Any] = dict(
+ inspect.getmembers(utilities, inspect.isclass)
+)
+utility_type_to_cls_dict["SQLDatabase"] = SQLDatabase
diff --git a/src/backend/langflow/interface/document_loaders/__init__.py b/src/backend/langflow/interface/document_loaders/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/document_loaders/base.py b/src/backend/langflow/interface/document_loaders/base.py
new file mode 100644
index 000000000..aab017c0f
--- /dev/null
+++ b/src/backend/langflow/interface/document_loaders/base.py
@@ -0,0 +1,146 @@
+from typing import Dict, List, Optional
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+def build_file_path_template(
+ suffixes: list, fileTypes: list, name: str = "file_path"
+) -> Dict:
+ """Build a file path template for a document loader."""
+ return {
+ "type": "file",
+ "required": True,
+ "show": True,
+ "name": name,
+ "value": "",
+ "suffixes": suffixes,
+ "fileTypes": fileTypes,
+ }
+
+
+class DocumentLoaderCreator(LangChainTypeCreator):
+ type_name: str = "documentloaders"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ return documentloaders_type_to_cls_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a document loader."""
+ try:
+ signature = build_template_from_class(
+ name, documentloaders_type_to_cls_dict
+ )
+
+ file_path_templates = {
+ "AirbyteJSONLoader": build_file_path_template(
+ suffixes=[".json"], fileTypes=["json"]
+ ),
+ "CoNLLULoader": build_file_path_template(
+ suffixes=[".csv"], fileTypes=["csv"]
+ ),
+ "CSVLoader": build_file_path_template(
+ suffixes=[".csv"], fileTypes=["csv"]
+ ),
+ "UnstructuredEmailLoader": build_file_path_template(
+ suffixes=[".eml"], fileTypes=["eml"]
+ ),
+ "EverNoteLoader": build_file_path_template(
+ suffixes=[".xml"], fileTypes=["xml"]
+ ),
+ "FacebookChatLoader": build_file_path_template(
+ suffixes=[".json"], fileTypes=["json"]
+ ),
+ "GutenbergLoader": build_file_path_template(
+ suffixes=[".txt"], fileTypes=["txt"]
+ ),
+ "BSHTMLLoader": build_file_path_template(
+ suffixes=[".html"], fileTypes=["html"]
+ ),
+ "UnstructuredHTMLLoader": build_file_path_template(
+ suffixes=[".html"], fileTypes=["html"]
+ ),
+ "UnstructuredImageLoader": build_file_path_template(
+ suffixes=[".jpg", ".jpeg", ".png", ".gif", ".bmp"],
+ fileTypes=["jpg", "jpeg", "png", "gif", "bmp"],
+ ),
+ "UnstructuredMarkdownLoader": build_file_path_template(
+ suffixes=[".md"], fileTypes=["md"]
+ ),
+ "PyPDFLoader": build_file_path_template(
+ suffixes=[".pdf"], fileTypes=["pdf"]
+ ),
+ "UnstructuredPowerPointLoader": build_file_path_template(
+ suffixes=[".pptx", ".ppt"], fileTypes=["pptx", "ppt"]
+ ),
+ "SRTLoader": build_file_path_template(
+ suffixes=[".srt"], fileTypes=["srt"]
+ ),
+ "TelegramChatLoader": build_file_path_template(
+ suffixes=[".json"], fileTypes=["json"]
+ ),
+ "TextLoader": build_file_path_template(
+ suffixes=[".txt"], fileTypes=["txt"]
+ ),
+ "UnstructuredWordDocumentLoader": build_file_path_template(
+ suffixes=[".docx", ".doc"], fileTypes=["docx", "doc"]
+ ),
+ }
+
+ if name in file_path_templates:
+ signature["template"]["file_path"] = file_path_templates[name]
+ elif name in {
+ "WebBaseLoader",
+ "AZLyricsLoader",
+ "CollegeConfidentialLoader",
+ "HNLoader",
+ "IFixitLoader",
+ "IMSDbLoader",
+ }:
+ signature["template"]["web_path"] = {
+ "type": "str",
+ "required": True,
+ "show": True,
+ "name": "web_path",
+ "value": "",
+ "display_name": "Web Page",
+ }
+ elif name in {"GitbookLoader"}:
+ signature["template"]["web_page"] = {
+ "type": "str",
+ "required": True,
+ "show": True,
+ "name": "web_page",
+ "value": "",
+ "display_name": "Web Page",
+ }
+ elif name in {"ReadTheDocsLoader"}:
+ signature["template"]["path"] = {
+ "type": "str",
+ "required": True,
+ "show": True,
+ "name": "path",
+ "value": "",
+ "display_name": "Web Page",
+ }
+
+ return signature
+ except ValueError as exc:
+            raise ValueError(f"Document Loader {name} not found") from exc
+ except AttributeError as exc:
+            logger.error(f"Document Loader {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ documentloader.__name__
+ for documentloader in self.type_to_loader_dict.values()
+ if documentloader.__name__ in settings.documentloaders or settings.dev
+ ]
+
+
+documentloader_creator = DocumentLoaderCreator()
diff --git a/src/backend/langflow/interface/embeddings/__init__.py b/src/backend/langflow/interface/embeddings/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/embeddings/base.py b/src/backend/langflow/interface/embeddings/base.py
new file mode 100644
index 000000000..061b1d3b5
--- /dev/null
+++ b/src/backend/langflow/interface/embeddings/base.py
@@ -0,0 +1,36 @@
+from typing import Dict, List, Optional
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import embedding_type_to_cls_dict
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class EmbeddingCreator(LangChainTypeCreator):
+ type_name: str = "embeddings"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ return embedding_type_to_cls_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of an embedding."""
+ try:
+ return build_template_from_class(name, embedding_type_to_cls_dict)
+ except ValueError as exc:
+ raise ValueError(f"Embedding {name} not found") from exc
+
+ except AttributeError as exc:
+ logger.error(f"Embedding {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ embedding.__name__
+ for embedding in self.type_to_loader_dict.values()
+ if embedding.__name__ in settings.embeddings or settings.dev
+ ]
+
+
+embedding_creator = EmbeddingCreator()
diff --git a/src/backend/langflow/interface/importing/__init__.py b/src/backend/langflow/interface/importing/__init__.py
new file mode 100644
index 000000000..317849f8e
--- /dev/null
+++ b/src/backend/langflow/interface/importing/__init__.py
@@ -0,0 +1,7 @@
+from langflow.interface.importing.utils import import_by_type # noqa: F401
+
+# This module is used to import any langchain class by name.
+
+ALL = [
+ "import_by_type",
+]
diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py
new file mode 100644
index 000000000..e303da0eb
--- /dev/null
+++ b/src/backend/langflow/interface/importing/utils.py
@@ -0,0 +1,147 @@
+# This module is used to import any langchain class by name.
+
+import importlib
+from typing import Any, Type
+
+from langchain import PromptTemplate
+from langchain.agents import Agent
+from langchain.chains.base import Chain
+from langchain.chat_models.base import BaseChatModel
+from langchain.llms.base import BaseLLM
+from langchain.tools import BaseTool
+
+from langflow.interface.tools.base import tool_creator
+
+
+def import_module(module_path: str) -> Any:
+ """Import module from module path"""
+ if "from" not in module_path:
+ # Import the module using the module path
+ return importlib.import_module(module_path)
+ # Split the module path into its components
+ _, module_path, _, object_name = module_path.split()
+
+ # Import the module using the module path
+ module = importlib.import_module(module_path)
+
+ return getattr(module, object_name)
+
+
+def import_by_type(_type: str, name: str) -> Any:
+ """Import class by type and name"""
+ if _type is None:
+ raise ValueError(f"Type cannot be None. Check if {name} is in the config file.")
+ func_dict = {
+ "agents": import_agent,
+ "prompts": import_prompt,
+ "llms": {"llm": import_llm, "chat": import_chat_llm},
+ "tools": import_tool,
+ "chains": import_chain,
+ "toolkits": import_toolkit,
+ "wrappers": import_wrapper,
+ "memory": import_memory,
+ "embeddings": import_embedding,
+ "vectorstores": import_vectorstore,
+ "documentloaders": import_documentloader,
+ "textsplitters": import_textsplitter,
+ "utilities": import_utility,
+ }
+ if _type == "llms":
+ key = "chat" if "chat" in name.lower() else "llm"
+ loaded_func = func_dict[_type][key] # type: ignore
+ else:
+ loaded_func = func_dict[_type]
+
+ return loaded_func(name)
+
+
+def import_chat_llm(llm: str) -> BaseChatModel:
+ """Import chat llm from llm name"""
+ return import_class(f"langchain.chat_models.{llm}")
+
+
+def import_memory(memory: str) -> Any:
+ """Import memory from memory name"""
+ return import_module(f"from langchain.memory import {memory}")
+
+
+def import_class(class_path: str) -> Any:
+ """Import class from class path"""
+ module_path, class_name = class_path.rsplit(".", 1)
+ module = import_module(module_path)
+ return getattr(module, class_name)
+
+
+def import_prompt(prompt: str) -> Type[PromptTemplate]:
+ from langflow.interface.prompts.custom import CUSTOM_PROMPTS
+
+ """Import prompt from prompt name"""
+ if prompt == "ZeroShotPrompt":
+ return import_class("langchain.prompts.PromptTemplate")
+ elif prompt in CUSTOM_PROMPTS:
+ return CUSTOM_PROMPTS[prompt]
+ return import_class(f"langchain.prompts.{prompt}")
+
+
+def import_wrapper(wrapper: str) -> Any:
+ """Import wrapper from wrapper name"""
+ return import_module(f"from langchain.requests import {wrapper}")
+
+
+def import_toolkit(toolkit: str) -> Any:
+ """Import toolkit from toolkit name"""
+ return import_module(f"from langchain.agents.agent_toolkits import {toolkit}")
+
+
+def import_agent(agent: str) -> Agent:
+ """Import agent from agent name"""
+ # check for custom agent
+
+ return import_class(f"langchain.agents.{agent}")
+
+
+def import_llm(llm: str) -> BaseLLM:
+ """Import llm from llm name"""
+ return import_class(f"langchain.llms.{llm}")
+
+
+def import_tool(tool: str) -> BaseTool:
+ """Import tool from tool name"""
+
+ return tool_creator.type_to_loader_dict[tool]["fcn"]
+
+
+def import_chain(chain: str) -> Type[Chain]:
+ """Import chain from chain name"""
+ from langflow.interface.chains.custom import CUSTOM_CHAINS
+
+ if chain in CUSTOM_CHAINS:
+ return CUSTOM_CHAINS[chain]
+ return import_class(f"langchain.chains.{chain}")
+
+
+def import_embedding(embedding: str) -> Any:
+ """Import embedding from embedding name"""
+ return import_class(f"langchain.embeddings.{embedding}")
+
+
+def import_vectorstore(vectorstore: str) -> Any:
+ """Import vectorstore from vectorstore name"""
+ return import_class(f"langchain.vectorstores.{vectorstore}")
+
+
+def import_documentloader(documentloader: str) -> Any:
+ """Import documentloader from documentloader name"""
+ return import_class(f"langchain.document_loaders.{documentloader}")
+
+
+def import_textsplitter(textsplitter: str) -> Any:
+ """Import textsplitter from textsplitter name"""
+ return import_class(f"langchain.text_splitter.{textsplitter}")
+
+
+def import_utility(utility: str) -> Any:
+ """Import utility from utility name"""
+ if utility == "SQLDatabase":
+ return import_class(f"langchain.sql_database.{utility}")
+ return import_class(f"langchain.utilities.{utility}")
diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py
index 21e763de4..3d73105c2 100644
--- a/src/backend/langflow/interface/listing.py
+++ b/src/backend/langflow/interface/listing.py
@@ -1,82 +1,41 @@
-from langchain import agents, chains, prompts
-from langchain.agents.load_tools import get_all_tool_names
-
-from langflow.custom import customs
-from langflow.interface.custom_lists import (
- llm_type_to_cls_dict,
- memory_type_to_cls_dict,
-)
-from langflow.settings import settings
-from langflow.utils import util
+from langflow.interface.agents.base import agent_creator
+from langflow.interface.chains.base import chain_creator
+from langflow.interface.document_loaders.base import documentloader_creator
+from langflow.interface.embeddings.base import embedding_creator
+from langflow.interface.llms.base import llm_creator
+from langflow.interface.memories.base import memory_creator
+from langflow.interface.prompts.base import prompt_creator
+from langflow.interface.text_splitters.base import textsplitter_creator
+from langflow.interface.toolkits.base import toolkits_creator
+from langflow.interface.tools.base import tool_creator
+from langflow.interface.utilities.base import utility_creator
+from langflow.interface.vector_store.base import vectorstore_creator
+from langflow.interface.wrappers.base import wrapper_creator
-def list_type(object_type: str):
- """List all components"""
+def get_type_dict():
return {
- "chains": list_chain_types,
- "agents": list_agents,
- "prompts": list_prompts,
- "llms": list_llms,
- "memories": list_memories,
- "tools": list_tools,
- }.get(object_type, lambda: "Invalid type")()
+ "agents": agent_creator.to_list(),
+ "prompts": prompt_creator.to_list(),
+ "llms": llm_creator.to_list(),
+ "tools": tool_creator.to_list(),
+ "chains": chain_creator.to_list(),
+ "memory": memory_creator.to_list(),
+ "toolkits": toolkits_creator.to_list(),
+ "wrappers": wrapper_creator.to_list(),
+ "documentLoaders": documentloader_creator.to_list(),
+ "vectorStore": vectorstore_creator.to_list(),
+ "embeddings": embedding_creator.to_list(),
+ "textSplitters": textsplitter_creator.to_list(),
+ "utilities": utility_creator.to_list(),
+ }
-def list_agents():
- """List all agent types"""
- return [
- agent.__name__
- for agent in agents.loading.AGENT_TO_CLASS.values()
- if agent.__name__ in settings.agents or settings.dev
- ]
+LANGCHAIN_TYPES_DICT = get_type_dict()
+# Now we'll build a dict with Langchain types and ours
-def list_prompts():
- """List all prompt types"""
- custom_prompts = customs.get_custom_prompts()
- library_prompts = [
- prompt.__annotations__["return"].__name__
- for prompt in prompts.loading.type_to_loader_dict.values()
- if prompt.__annotations__["return"].__name__ in settings.prompts or settings.dev
- ]
- return library_prompts + list(custom_prompts.keys())
-
-
-def list_tools():
- """List all load tools"""
-
- tools = []
-
- for tool in get_all_tool_names():
- tool_params = util.get_tool_params(util.get_tools_dict(tool))
- if tool_params and tool_params["name"] in settings.tools or settings.dev:
- tools.append(tool_params["name"])
-
- return tools
-
-
-def list_llms():
- """List all llm types"""
- return [
- llm.__name__
- for llm in llm_type_to_cls_dict.values()
- if llm.__name__ in settings.llms or settings.dev
- ]
-
-
-def list_chain_types():
- """List all chain types"""
- return [
- chain.__annotations__["return"].__name__
- for chain in chains.loading.type_to_loader_dict.values()
- if chain.__annotations__["return"].__name__ in settings.chains or settings.dev
- ]
-
-
-def list_memories():
- """List all memory types"""
- return [
- memory.__name__
- for memory in memory_type_to_cls_dict.values()
- if memory.__name__ in settings.memories or settings.dev
- ]
+ALL_TYPES_DICT = {
+ **LANGCHAIN_TYPES_DICT,
+ "Custom": ["Custom Tool", "Python Function"],
+}
diff --git a/src/backend/langflow/interface/llms/__init__.py b/src/backend/langflow/interface/llms/__init__.py
new file mode 100644
index 000000000..c5d7186fb
--- /dev/null
+++ b/src/backend/langflow/interface/llms/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.llms.base import LLMCreator
+
+__all__ = ["LLMCreator"]
diff --git a/src/backend/langflow/interface/llms/base.py b/src/backend/langflow/interface/llms/base.py
new file mode 100644
index 000000000..04a36eb2d
--- /dev/null
+++ b/src/backend/langflow/interface/llms/base.py
@@ -0,0 +1,43 @@
+from typing import Dict, List, Optional, Type
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import llm_type_to_cls_dict
+from langflow.settings import settings
+from langflow.template.nodes import LLMFrontendNode
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class LLMCreator(LangChainTypeCreator):
+ type_name: str = "llms"
+
+ @property
+ def frontend_node_class(self) -> Type[LLMFrontendNode]:
+ return LLMFrontendNode
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = llm_type_to_cls_dict
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of an llm."""
+ try:
+ return build_template_from_class(name, llm_type_to_cls_dict)
+ except ValueError as exc:
+ raise ValueError("LLM not found") from exc
+
+ except AttributeError as exc:
+ logger.error(f"LLM {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ llm.__name__
+ for llm in self.type_to_loader_dict.values()
+ if llm.__name__ in settings.llms or settings.dev
+ ]
+
+
+llm_creator = LLMCreator()
diff --git a/src/backend/langflow/interface/loading.py b/src/backend/langflow/interface/loading.py
index 619264988..4cf702ea2 100644
--- a/src/backend/langflow/interface/loading.py
+++ b/src/backend/langflow/interface/loading.py
@@ -1,7 +1,10 @@
import json
-from typing import Any, Dict, Optional
+from typing import Any, Callable, Dict, Optional
+from langchain.agents import ZeroShotAgent
+from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
+from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.load_tools import (
_BASE_TOOLS,
_EXTRA_LLM_TOOLS,
@@ -15,29 +18,95 @@ from langchain.chains.loading import load_chain_from_config
from langchain.llms.base import BaseLLM
from langchain.llms.loading import load_llm_from_config
+from langflow.interface.agents.custom import CUSTOM_AGENTS
+from langflow.interface.importing.utils import import_by_type
+from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.types import get_type_list
-from langflow.utils import payload, util
+from langflow.interface.utils import load_file_into_dict
+from langflow.utils import util, validate
-def load_flow_from_json(path: str):
+def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
+ """Instantiate class from module type and key, and params"""
+ if node_type in CUSTOM_AGENTS:
+ if custom_agent := CUSTOM_AGENTS.get(node_type):
+ return custom_agent.initialize(**params) # type: ignore
+
+ class_object = import_by_type(_type=base_type, name=node_type)
+
+ if base_type == "agents":
+ # We need to initialize it differently
+ return load_agent_executor(class_object, params)
+ elif base_type == "prompts":
+ if node_type == "ZeroShotPrompt":
+ if "tools" not in params:
+ params["tools"] = []
+ return ZeroShotAgent.create_prompt(**params)
+ elif base_type == "tools":
+ if node_type == "JsonSpec":
+ params["dict_"] = load_file_into_dict(params.pop("path"))
+ return class_object(**params)
+ elif node_type == "PythonFunction":
+ # If the node_type is "PythonFunction"
+ # we need to get the function from the params
+ # which will be a str containing a python function
+ # and then we need to compile it and return the function
+ # as the instance
+ function_string = params["code"]
+ if isinstance(function_string, str):
+ return validate.eval_function(function_string)
+ raise ValueError("Function should be a string")
+ elif node_type.lower() == "tool":
+ return class_object(**params)
+ elif base_type == "toolkits":
+ loaded_toolkit = class_object(**params)
+ # Check if node_type has a loader
+ if toolkits_creator.has_create_function(node_type):
+ return load_toolkits_executor(node_type, loaded_toolkit, params)
+ return loaded_toolkit
+ elif base_type == "embeddings":
+ params.pop("model")
+ return class_object(**params)
+ elif base_type == "vectorstores":
+ if len(params.get("documents", [])) == 0:
+ # Error when the pdf or other source was not correctly
+ # loaded.
+ raise ValueError(
+ "The source you provided did not load correctly or was empty."
+ "This may cause an error in the vectorstore."
+ )
+ return class_object.from_documents(**params)
+ elif base_type == "documentloaders":
+ return class_object(**params).load()
+ elif base_type == "textsplitters":
+ documents = params.pop("documents")
+ text_splitter = class_object(**params)
+ return text_splitter.split_documents(documents)
+ elif base_type == "utilities":
+ if node_type == "SQLDatabase":
+ return class_object.from_uri(params.pop("uri"))
+
+ return class_object(**params)
+
+
+def load_flow_from_json(path: str, build=True):
+ # This is done to avoid circular imports
+ from langflow.graph import Graph
+
"""Load flow from json file"""
- with open(path, "r") as f:
+ with open(path, "r", encoding="utf-8") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
- extracted_json = extract_json(data_graph)
- return load_langchain_type_from_config(config=extracted_json)
-
-
-def extract_json(data_graph):
nodes = data_graph["nodes"]
# Substitute ZeroShotPrompt with PromptTemplate
- nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
+ # nodes = replace_zero_shot_prompt_with_prompt_template(nodes)
# Add input variables
- nodes = payload.extract_input_variables(nodes)
+ # nodes = payload.extract_input_variables(nodes)
+
# Nodes, edges and root node
edges = data_graph["edges"]
- root = payload.get_root_node(nodes, edges)
- return payload.build_json(root, nodes, edges)
+ graph = Graph(nodes, edges)
+ return graph.build() if build else graph
def replace_zero_shot_prompt_with_prompt_template(nodes):
@@ -92,6 +161,25 @@ def load_agent_executor_from_config(
)
+def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs):
+ """Load agent executor from agent class, tools and chain"""
+ allowed_tools = params["allowed_tools"]
+ llm_chain = params["llm_chain"]
+ tool_names = [tool.name for tool in allowed_tools]
+ agent = agent_class(allowed_tools=tool_names, llm_chain=llm_chain)
+ return AgentExecutor.from_agent_and_tools(
+ agent=agent,
+ tools=allowed_tools,
+ **kwargs,
+ )
+
+
+def load_toolkits_executor(node_type: str, toolkit: BaseToolkit, params: dict):
+ create_function: Callable = toolkits_creator.get_create_function(node_type)
+ if llm := params.get("llm"):
+ return create_function(llm=llm, toolkit=toolkit)
+
+
def load_tools_from_config(tool_list: list[dict]) -> list:
"""Load tools based on a config list.
diff --git a/src/backend/langflow/interface/memories/__init__.py b/src/backend/langflow/interface/memories/__init__.py
new file mode 100644
index 000000000..845eb29fe
--- /dev/null
+++ b/src/backend/langflow/interface/memories/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.memories.base import MemoryCreator
+
+__all__ = ["MemoryCreator"]
diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/langflow/interface/memories/base.py
new file mode 100644
index 000000000..f26b09351
--- /dev/null
+++ b/src/backend/langflow/interface/memories/base.py
@@ -0,0 +1,44 @@
+from typing import Dict, List, Optional, Type
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import memory_type_to_cls_dict
+from langflow.settings import settings
+from langflow.template.base import FrontendNode
+from langflow.template.nodes import MemoryFrontendNode
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class MemoryCreator(LangChainTypeCreator):
+ type_name: str = "memories"
+
+ @property
+ def frontend_node_class(self) -> Type[FrontendNode]:
+ """The class type of the FrontendNode created in frontend_node."""
+ return MemoryFrontendNode
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = memory_type_to_cls_dict
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a memory."""
+ try:
+ return build_template_from_class(name, memory_type_to_cls_dict)
+ except ValueError as exc:
+ raise ValueError("Memory not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Memory {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ memory.__name__
+ for memory in self.type_to_loader_dict.values()
+ if memory.__name__ in settings.memories or settings.dev
+ ]
+
+
+memory_creator = MemoryCreator()
diff --git a/src/backend/langflow/interface/prompts/__init__.py b/src/backend/langflow/interface/prompts/__init__.py
new file mode 100644
index 000000000..2a81e8bf0
--- /dev/null
+++ b/src/backend/langflow/interface/prompts/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.prompts.base import PromptCreator
+
+__all__ = ["PromptCreator"]
diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py
new file mode 100644
index 000000000..5f83a5412
--- /dev/null
+++ b/src/backend/langflow/interface/prompts/base.py
@@ -0,0 +1,64 @@
+from typing import Dict, List, Optional, Type
+
+from langchain import prompts
+
+from langflow.custom.customs import get_custom_nodes
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.importing.utils import import_class
+from langflow.settings import settings
+from langflow.template.nodes import PromptFrontendNode
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class PromptCreator(LangChainTypeCreator):
+ type_name: str = "prompts"
+
+ @property
+ def frontend_node_class(self) -> Type[PromptFrontendNode]:
+ return PromptFrontendNode
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = {
+ prompt_name: import_class(f"langchain.prompts.{prompt_name}")
+ # if prompt_name is not lower case it is a class
+ for prompt_name in prompts.__all__
+ }
+ # Merge CUSTOM_PROMPTS into self.type_dict
+ from langflow.interface.prompts.custom import CUSTOM_PROMPTS
+
+ self.type_dict.update(CUSTOM_PROMPTS)
+ # Now filter according to settings.prompts
+ self.type_dict = {
+ name: prompt
+ for name, prompt in self.type_dict.items()
+ if name in settings.prompts or settings.dev
+ }
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ try:
+ if name in get_custom_nodes(self.type_name).keys():
+ return get_custom_nodes(self.type_name)[name]
+ return build_template_from_class(name, self.type_to_loader_dict)
+ except ValueError as exc:
+ # raise ValueError("Prompt not found") from exc
+ logger.error(f"Prompt {name} not found: {exc}")
+ except AttributeError as exc:
+ logger.error(f"Prompt {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ custom_prompts = get_custom_nodes("prompts")
+ # library_prompts = [
+ # prompt.__annotations__["return"].__name__
+ # for prompt in self.type_to_loader_dict.values()
+ # if prompt.__annotations__["return"].__name__ in settings.prompts
+ # or settings.dev
+ # ]
+ return list(self.type_to_loader_dict.keys()) + list(custom_prompts.keys())
+
+
+prompt_creator = PromptCreator()
diff --git a/src/backend/langflow/interface/prompts/custom.py b/src/backend/langflow/interface/prompts/custom.py
new file mode 100644
index 000000000..b1dbef370
--- /dev/null
+++ b/src/backend/langflow/interface/prompts/custom.py
@@ -0,0 +1,77 @@
+from typing import Dict, List, Optional, Type
+
+from langchain.prompts import PromptTemplate
+from pydantic import root_validator
+
+from langflow.graph.utils import extract_input_variables_from_prompt
+
+# Steps to create a BaseCustomPrompt:
+# 1. Create a prompt template that ends with:
+# Current conversation:
+# {history}
+# Human: {input}
+# {ai_prefix}:
+# 2. Create a class that inherits from BaseCustomPrompt
+# 3. Add the following class attributes:
+# template: str = ""
+# description: Optional[str]
+# ai_prefix: Optional[str] = "{ai_prefix}"
+# 3.1. The ai_prefix should be a value in input_variables
+# SeriesCharacterPrompt is a working example
+# If used in a LLMChain, with a Memory module, it will work as expected
+# We should consider creating ConversationalChains that expose custom parameters
+# That way it will be easier to create custom prompts
+
+
+class BaseCustomPrompt(PromptTemplate):
+ template: str = ""
+ description: Optional[str]
+ ai_prefix: Optional[str]
+
+ @root_validator(pre=False)
+ def build_template(cls, values):
+ format_dict = {}
+ ai_prefix_format_dict = {}
+ for key in values.get("input_variables", []):
+ new_value = values.get(key, f"{{{key}}}")
+ format_dict[key] = new_value
+ if key in values["ai_prefix"]:
+ ai_prefix_format_dict[key] = new_value
+
+ values["ai_prefix"] = values["ai_prefix"].format(**ai_prefix_format_dict)
+ values["template"] = values["template"].format(**format_dict)
+
+ values["template"] = values["template"]
+ values["input_variables"] = extract_input_variables_from_prompt(
+ values["template"]
+ )
+ return values
+
+
+class SeriesCharacterPrompt(BaseCustomPrompt):
+ # Add a very descriptive description for the prompt generator
+ description: Optional[
+ str
+ ] = "A prompt that asks the AI to act like a character from a series."
+ character: str
+ series: str
+ template: str = """I want you to act like {character} from {series}.
+I want you to respond and answer like {character}. do not write any explanations. only answer like {character}.
+You must know all of the knowledge of {character}.
+
+Current conversation:
+{history}
+Human: {input}
+{character}:"""
+
+ ai_prefix: str = "{character}"
+ input_variables: List[str] = ["character", "series"]
+
+
+CUSTOM_PROMPTS: Dict[str, Type[BaseCustomPrompt]] = {
+ "SeriesCharacterPrompt": SeriesCharacterPrompt
+}
+
+if __name__ == "__main__":
+ prompt = SeriesCharacterPrompt(character="Harry Potter", series="Harry Potter")
+ print(prompt.template)
diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py
index 0844e2073..047ba733e 100644
--- a/src/backend/langflow/interface/run.py
+++ b/src/backend/langflow/interface/run.py
@@ -1,47 +1,286 @@
import contextlib
import io
-import re
from typing import Any, Dict
+from chromadb.errors import NotEnoughElementsException # type: ignore
+from langflow.cache.utils import compute_dict_hash, load_cache, memoize_dict
+from langflow.graph.graph import Graph
from langflow.interface import loading
+from langflow.utils.logger import logger
-def process_data_graph(data_graph: Dict[str, Any]):
+def load_langchain_object(data_graph, is_first_message=False):
"""
- Process data graph by extracting input variables and replacing ZeroShotPrompt
+ Load langchain object from cache if it exists, otherwise build it.
+ """
+ computed_hash = compute_dict_hash(data_graph)
+ if is_first_message:
+ langchain_object = build_langchain_object(data_graph)
+ else:
+ logger.debug("Loading langchain object from cache")
+ langchain_object = load_cache(computed_hash)
+
+ return computed_hash, langchain_object
+
+
+def load_or_build_langchain_object(data_graph, is_first_message=False):
+ """
+ Load langchain object from cache if it exists, otherwise build it.
+ """
+ if is_first_message:
+ build_langchain_object_with_caching.clear_cache()
+ return build_langchain_object_with_caching(data_graph)
+
+
+@memoize_dict(maxsize=1)
+def build_langchain_object_with_caching(data_graph):
+ """
+ Build langchain object from data_graph.
+ """
+
+ logger.debug("Building langchain object")
+ nodes = data_graph["nodes"]
+ # Add input variables
+ # nodes = payload.extract_input_variables(nodes)
+ # Nodes, edges and root node
+ edges = data_graph["edges"]
+ graph = Graph(nodes, edges)
+
+ return graph.build()
+
+
+def build_langchain_object(data_graph):
+ """
+ Build langchain object from data_graph.
+ """
+
+ logger.debug("Building langchain object")
+ nodes = data_graph["nodes"]
+ # Add input variables
+ # nodes = payload.extract_input_variables(nodes)
+ # Nodes, edges and root node
+ edges = data_graph["edges"]
+ graph = Graph(nodes, edges)
+
+ return graph.build()
+
+
+def process_graph(data_graph: Dict[str, Any]):
+ """
+ Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate,then run the graph and return the result and thought.
"""
+ # Load langchain object
+ logger.debug("Loading langchain object")
+ message = data_graph.pop("message", "")
+ is_first_message = len(data_graph.get("chatHistory", [])) == 0
+ computed_hash, langchain_object = load_langchain_object(
+ data_graph, is_first_message
+ )
+ logger.debug("Loaded langchain object")
- extracted_json = loading.extract_json(data_graph)
+ if langchain_object is None:
+ # Raise user facing error
+ raise ValueError(
+ "There was an error loading the langchain_object. Please, check all the nodes and try again."
+ )
- message = data_graph["message"]
+ # Generate result and thought
+ logger.debug("Generating result and thought")
+ result, thought = get_result_and_thought_using_graph(langchain_object, message)
+ logger.debug("Generated result and thought")
- # Process json
- result, thought = get_result_and_thought(extracted_json, message)
+ # Save langchain_object to cache
+ # We have to save it here because if the
+ # memory is updated we need to keep the new values
+ logger.debug("Saving langchain object to cache")
+ # save_cache(computed_hash, langchain_object, is_first_message)
+ logger.debug("Saved langchain object to cache")
+ return {"result": str(result), "thought": thought.strip()}
- return {
- "result": result,
- "thought": re.sub(
- r"\x1b\[([0-9,A-Z]{1,2}(;[0-9,A-Z]{1,2})?)?[m|K]", "", thought
- ).strip(),
+
+def process_graph_cached(data_graph: Dict[str, Any]):
+ """
+ Process graph by extracting input variables and replacing ZeroShotPrompt
+ with PromptTemplate,then run the graph and return the result and thought.
+ """
+ # Load langchain object
+ message = data_graph.pop("message", "")
+ is_first_message = len(data_graph.get("chatHistory", [])) == 0
+ langchain_object = load_or_build_langchain_object(data_graph, is_first_message)
+ logger.debug("Loaded langchain object")
+
+ if langchain_object is None:
+ # Raise user facing error
+ raise ValueError(
+ "There was an error loading the langchain_object. Please, check all the nodes and try again."
+ )
+
+ # Generate result and thought
+ logger.debug("Generating result and thought")
+ result, thought = get_result_and_thought_using_graph(langchain_object, message)
+ logger.debug("Generated result and thought")
+ return {"result": str(result), "thought": thought.strip()}
+
+
+def get_memory_key(langchain_object):
+ """
+ Given a LangChain object, this function retrieves the current memory key from the object's memory attribute.
+ It then checks if the key exists in a dictionary of known memory keys and returns the corresponding key,
+ or None if the current key is not recognized.
+ """
+ mem_key_dict = {
+ "chat_history": "history",
+ "history": "chat_history",
}
+ memory_key = langchain_object.memory.memory_key
+ return mem_key_dict.get(memory_key)
+
+
+def update_memory_keys(langchain_object, possible_new_mem_key):
+ """
+ Given a LangChain object and a possible new memory key, this function updates the input and output keys in the
+ object's memory attribute to exclude the current memory key and the possible new key. It then sets the memory key
+ to the possible new key.
+ """
+ input_key = [
+ key
+ for key in langchain_object.input_keys
+ if key not in [langchain_object.memory.memory_key, possible_new_mem_key]
+ ][0]
+
+ output_key = [
+ key
+ for key in langchain_object.output_keys
+ if key not in [langchain_object.memory.memory_key, possible_new_mem_key]
+ ][0]
+
+ langchain_object.memory.input_key = input_key
+ langchain_object.memory.output_key = output_key
+ langchain_object.memory.memory_key = possible_new_mem_key
+
+
+def fix_memory_inputs(langchain_object):
+ """
+ Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the
+ object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the
+ get_memory_key function and updates the memory keys using the update_memory_keys function.
+ """
+ if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
+ try:
+ if langchain_object.memory.memory_key in langchain_object.input_variables:
+ return
+ except AttributeError:
+ input_variables = (
+ langchain_object.prompt.input_variables
+ if hasattr(langchain_object, "prompt")
+ else langchain_object.input_keys
+ )
+ if langchain_object.memory.memory_key in input_variables:
+ return
+
+ possible_new_mem_key = get_memory_key(langchain_object)
+ if possible_new_mem_key is not None:
+ update_memory_keys(langchain_object, possible_new_mem_key)
+
+
+def get_result_and_thought_using_graph(langchain_object, message: str):
+ """Get result and thought from extracted json"""
+ try:
+ if hasattr(langchain_object, "verbose"):
+ langchain_object.verbose = True
+ chat_input = None
+ memory_key = ""
+ if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
+ memory_key = langchain_object.memory.memory_key
+
+ if hasattr(langchain_object, "input_keys"):
+ for key in langchain_object.input_keys:
+ if key not in [memory_key, "chat_history"]:
+ chat_input = {key: message}
+ else:
+ chat_input = message # type: ignore
+
+ if hasattr(langchain_object, "return_intermediate_steps"):
+ # https://github.com/hwchase17/langchain/issues/2068
+ # Deactivating until we have a frontend solution
+ # to display intermediate steps
+ langchain_object.return_intermediate_steps = False
+
+ fix_memory_inputs(langchain_object)
+
+ with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
+ try:
+ output = langchain_object(chat_input)
+ except ValueError as exc:
+ # make the error message more informative
+ logger.debug(f"Error: {str(exc)}")
+ output = langchain_object.run(chat_input)
+
+ intermediate_steps = (
+ output.get("intermediate_steps", []) if isinstance(output, dict) else []
+ )
+
+ result = (
+ output.get(langchain_object.output_keys[0])
+ if isinstance(output, dict)
+ else output
+ )
+ if intermediate_steps:
+ thought = format_intermediate_steps(intermediate_steps)
+ else:
+ thought = output_buffer.getvalue()
+
+ except NotEnoughElementsException as exc:
+ raise ValueError(
+ "Error: Not enough documents for ChromaDB to index. Try reducing chunk size in TextSplitter."
+ ) from exc
+ except Exception as exc:
+ raise ValueError(f"Error: {str(exc)}") from exc
+ return result, thought
def get_result_and_thought(extracted_json: Dict[str, Any], message: str):
"""Get result and thought from extracted json"""
try:
- loaded_langchain = loading.load_langchain_type_from_config(
+ langchain_object = loading.load_langchain_type_from_config(
config=extracted_json
)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
- result = loaded_langchain(message)
- result = (
- result.get(loaded_langchain.output_keys[0])
- if isinstance(result, dict)
- else result
+ output = langchain_object(message)
+ intermediate_steps = (
+ output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
- thought = output_buffer.getvalue()
+ result = (
+ output.get(langchain_object.output_keys[0])
+ if isinstance(output, dict)
+ else output
+ )
+
+ if intermediate_steps:
+ thought = format_intermediate_steps(intermediate_steps)
+ else:
+ thought = output_buffer.getvalue()
+
except Exception as e:
result = f"Error: {str(e)}"
thought = ""
return result, thought
+
+
+def format_intermediate_steps(intermediate_steps):
+ formatted_chain = "> Entering new AgentExecutor chain...\n"
+ for step in intermediate_steps:
+ action = step[0]
+ observation = step[1]
+
+ formatted_chain += (
+ f" {action.log}\nAction: {action.tool}\nAction Input: {action.tool_input}\n"
+ )
+ formatted_chain += f"Observation: {observation}\n"
+
+ final_answer = f"Final Answer: {observation}\n"
+ formatted_chain += f"Thought: I now know the final answer\n{final_answer}\n"
+ formatted_chain += "> Finished chain.\n"
+
+ return formatted_chain
diff --git a/src/backend/langflow/interface/signature.py b/src/backend/langflow/interface/signature.py
deleted file mode 100644
index 69e83fad6..000000000
--- a/src/backend/langflow/interface/signature.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from typing import Any, Dict # noqa: F401
-
-from langchain import agents, chains, prompts
-from langchain.agents.load_tools import (
- _BASE_TOOLS,
- _EXTRA_LLM_TOOLS,
- _EXTRA_OPTIONAL_TOOLS,
- _LLM_TOOLS,
- get_all_tool_names,
-)
-
-from langflow.custom import customs
-from langflow.interface.custom_lists import (
- llm_type_to_cls_dict,
- memory_type_to_cls_dict,
-)
-from langflow.utils import util
-
-
-def get_signature(name: str, object_type: str):
- """Get the signature of an object."""
- return {
- "chains": get_chain_signature,
- "agents": get_agent_signature,
- "prompts": get_prompt_signature,
- "llms": get_llm_signature,
- "memories": get_memory_signature,
- "tools": get_tool_signature,
- }.get(object_type, lambda name: f"Invalid type: {name}")(name)
-
-
-def get_chain_signature(name: str):
- """Get the chain type by signature."""
- try:
- return util.build_template_from_function(
- name, chains.loading.type_to_loader_dict
- )
- except ValueError as exc:
- raise ValueError("Chain not found") from exc
-
-
-def get_agent_signature(name: str):
- """Get the signature of an agent."""
- try:
- return util.build_template_from_class(name, agents.loading.AGENT_TO_CLASS)
- except ValueError as exc:
- raise ValueError("Agent not found") from exc
-
-
-def get_prompt_signature(name: str):
- """Get the signature of a prompt."""
- try:
- if name in customs.get_custom_prompts().keys():
- return customs.get_custom_prompts()[name]
- return util.build_template_from_function(
- name, prompts.loading.type_to_loader_dict
- )
- except ValueError as exc:
- raise ValueError("Prompt not found") from exc
-
-
-def get_llm_signature(name: str):
- """Get the signature of an llm."""
- try:
- return util.build_template_from_class(name, llm_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError("LLM not found") from exc
-
-
-def get_memory_signature(name: str):
- """Get the signature of a memory."""
- try:
- return util.build_template_from_class(name, memory_type_to_cls_dict)
- except ValueError as exc:
- raise ValueError("Memory not found") from exc
-
-
-def get_tool_signature(name: str):
- """Get the signature of a tool."""
-
- all_tools = {}
- for tool in get_all_tool_names():
- if tool_params := util.get_tool_params(util.get_tools_dict(tool)):
- all_tools[tool_params["name"]] = tool
-
- # Raise error if name is not in tools
- if name not in all_tools.keys():
- raise ValueError("Tool not found")
-
- type_dict = {
- "str": {
- "type": "str",
- "required": True,
- "list": False,
- "show": True,
- "placeholder": "",
- "value": "",
- },
- "llm": {"type": "BaseLLM", "required": True, "list": False, "show": True},
- }
-
- tool_type = all_tools[name]
-
- if tool_type in _BASE_TOOLS:
- params = []
- elif tool_type in _LLM_TOOLS:
- params = ["llm"]
- elif tool_type in _EXTRA_LLM_TOOLS:
- _, extra_keys = _EXTRA_LLM_TOOLS[tool_type]
- params = ["llm"] + extra_keys
- elif tool_type in _EXTRA_OPTIONAL_TOOLS:
- _, extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type]
- params = extra_keys
- else:
- params = []
-
- template = {
- param: (type_dict[param].copy() if param == "llm" else type_dict["str"].copy())
- for param in params
- }
-
- # Remove required from aiosession
- if "aiosession" in template.keys():
- template["aiosession"]["required"] = False
- template["aiosession"]["show"] = False
-
- template["_type"] = tool_type # type: ignore
-
- return {
- "template": template,
- **util.get_tool_params(util.get_tools_dict(tool_type)),
- "base_classes": ["Tool"],
- }
diff --git a/src/backend/langflow/interface/text_splitters/__init__.py b/src/backend/langflow/interface/text_splitters/__init__.py
new file mode 100644
index 000000000..4bb9dd1b0
--- /dev/null
+++ b/src/backend/langflow/interface/text_splitters/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.text_splitters.base import TextSplitterCreator
+
+__all__ = ["TextSplitterCreator"]
diff --git a/src/backend/langflow/interface/text_splitters/base.py b/src/backend/langflow/interface/text_splitters/base.py
new file mode 100644
index 000000000..e58a5bfa6
--- /dev/null
+++ b/src/backend/langflow/interface/text_splitters/base.py
@@ -0,0 +1,71 @@
+from typing import Dict, List, Optional
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class TextSplitterCreator(LangChainTypeCreator):
+ type_name: str = "textsplitters"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ return textsplitter_type_to_cls_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a text splitter."""
+ try:
+ signature = build_template_from_class(name, textsplitter_type_to_cls_dict)
+
+ signature["template"]["documents"] = {
+ "type": "BaseLoader",
+ "required": True,
+ "show": True,
+ "name": "documents",
+ }
+
+ signature["template"]["separator"] = {
+ "type": "str",
+ "required": True,
+ "show": True,
+ "value": ".",
+ "name": "separator",
+ "display_name": "Separator",
+ }
+
+ signature["template"]["chunk_size"] = {
+ "type": "int",
+ "required": True,
+ "show": True,
+ "value": 4000,
+ "name": "chunk_size",
+ "display_name": "Chunk Size",
+ }
+
+ signature["template"]["chunk_overlap"] = {
+ "type": "int",
+ "required": True,
+ "show": True,
+ "value": 200,
+ "name": "chunk_overlap",
+ "display_name": "Chunk Overlap",
+ }
+
+ return signature
+ except ValueError as exc:
+ raise ValueError(f"Text Splitter {name} not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Text Splitter {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ textsplitter.__name__
+ for textsplitter in self.type_to_loader_dict.values()
+ if textsplitter.__name__ in settings.textsplitters or settings.dev
+ ]
+
+
+textsplitter_creator = TextSplitterCreator()
diff --git a/src/backend/langflow/interface/toolkits/__init__.py b/src/backend/langflow/interface/toolkits/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/toolkits/base.py b/src/backend/langflow/interface/toolkits/base.py
new file mode 100644
index 000000000..cbe625f0d
--- /dev/null
+++ b/src/backend/langflow/interface/toolkits/base.py
@@ -0,0 +1,69 @@
+from typing import Callable, Dict, List, Optional
+
+from langchain.agents import agent_toolkits
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.importing.utils import import_class, import_module
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class ToolkitCreator(LangChainTypeCreator):
+ type_name: str = "toolkits"
+ all_types: List[str] = agent_toolkits.__all__
+ create_functions: Dict = {
+ "JsonToolkit": [],
+ "SQLDatabaseToolkit": [],
+ "OpenAPIToolkit": ["create_openapi_agent"],
+ "VectorStoreToolkit": [
+ "create_vectorstore_agent",
+ "create_vectorstore_router_agent",
+ "VectorStoreInfo",
+ ],
+ "ZapierToolkit": [],
+ "PandasToolkit": ["create_pandas_dataframe_agent"],
+ "CSVToolkit": ["create_csv_agent"],
+ }
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = {
+ toolkit_name: import_class(
+ f"langchain.agents.agent_toolkits.{toolkit_name}"
+ )
+ # if toolkit_name is not lower case it is a class
+ for toolkit_name in agent_toolkits.__all__
+ if not toolkit_name.islower() and toolkit_name in settings.toolkits
+ }
+
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ try:
+ return build_template_from_class(name, self.type_to_loader_dict)
+ except ValueError as exc:
+ raise ValueError("Toolkit not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Toolkit {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return list(self.type_to_loader_dict.keys())
+
+ def get_create_function(self, name: str) -> Callable:
+ if loader_name := self.create_functions.get(name, None):
+ # import loader
+ return import_module(
+ f"from langchain.agents.agent_toolkits import {loader_name[0]}"
+ )
+ else:
+ raise ValueError("Loader not found")
+
+ def has_create_function(self, name: str) -> bool:
+ # check if the function list is not empty
+ return bool(self.create_functions.get(name, None))
+
+
+toolkits_creator = ToolkitCreator()
diff --git a/src/backend/langflow/interface/toolkits/custom.py b/src/backend/langflow/interface/toolkits/custom.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/tools/__init__.py b/src/backend/langflow/interface/tools/__init__.py
new file mode 100644
index 000000000..148892e90
--- /dev/null
+++ b/src/backend/langflow/interface/tools/__init__.py
@@ -0,0 +1,3 @@
+from langflow.interface.tools.base import ToolCreator
+
+__all__ = ["ToolCreator"]
diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py
new file mode 100644
index 000000000..e756ecbd6
--- /dev/null
+++ b/src/backend/langflow/interface/tools/base.py
@@ -0,0 +1,164 @@
+from typing import Dict, List, Optional
+
+from langchain.agents.load_tools import (
+ _EXTRA_LLM_TOOLS,
+ _EXTRA_OPTIONAL_TOOLS,
+ _LLM_TOOLS,
+)
+
+from langflow.custom import customs
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.tools.constants import (
+ ALL_TOOLS_NAMES,
+ CUSTOM_TOOLS,
+ FILE_TOOLS,
+ OTHER_TOOLS,
+)
+from langflow.interface.tools.util import get_tool_params
+from langflow.settings import settings
+from langflow.template.base import Template, TemplateField
+from langflow.utils import util
+from langflow.utils.util import build_template_from_class
+
+TOOL_INPUTS = {
+ "str": TemplateField(
+ field_type="str",
+ required=True,
+ is_list=False,
+ show=True,
+ placeholder="",
+ value="",
+ ),
+ "llm": TemplateField(field_type="BaseLLM", required=True, is_list=False, show=True),
+ "func": TemplateField(
+ field_type="function",
+ required=True,
+ is_list=False,
+ show=True,
+ multiline=True,
+ ),
+ "code": TemplateField(
+ field_type="str",
+ required=True,
+ is_list=False,
+ show=True,
+ value="",
+ multiline=True,
+ ),
+ "path": TemplateField(
+ field_type="file",
+ required=True,
+ is_list=False,
+ show=True,
+ value="",
+ suffixes=[".json", ".yaml", ".yml"],
+ fileTypes=["json", "yaml", "yml"],
+ ),
+}
+
+
+class ToolCreator(LangChainTypeCreator):
+ type_name: str = "tools"
+ tools_dict: Optional[Dict] = None
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.tools_dict is None:
+ all_tools = {}
+ for tool, tool_fcn in ALL_TOOLS_NAMES.items():
+ tool_params = get_tool_params(tool_fcn)
+ tool_name = tool_params.get("name", tool)
+
+ if tool_name in settings.tools or settings.dev:
+ if tool_name == "JsonSpec":
+ tool_params["path"] = tool_params.pop("dict_") # type: ignore
+ all_tools[tool_name] = {
+ "type": tool,
+ "params": tool_params,
+ "fcn": tool_fcn,
+ }
+
+ self.tools_dict = all_tools
+
+ return self.tools_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a tool."""
+
+ base_classes = ["Tool"]
+ fields = []
+ params = []
+ tool_params = {}
+
+ # Raise error if name is not in tools
+ if name not in self.type_to_loader_dict.keys():
+ raise ValueError("Tool not found")
+
+ tool_type: str = self.type_to_loader_dict[name]["type"] # type: ignore
+
+ # if tool_type in _BASE_TOOLS.keys():
+ # params = []
+ if tool_type in _LLM_TOOLS.keys():
+ params = ["llm"]
+ elif tool_type in _EXTRA_LLM_TOOLS.keys():
+ extra_keys = _EXTRA_LLM_TOOLS[tool_type][1]
+ params = ["llm"] + extra_keys
+ elif tool_type in _EXTRA_OPTIONAL_TOOLS.keys():
+ extra_keys = _EXTRA_OPTIONAL_TOOLS[tool_type][1]
+ params = extra_keys
+ # elif tool_type == "Tool":
+ # params = ["name", "description", "func"]
+ elif tool_type in CUSTOM_TOOLS:
+ # Get custom tool params
+ params = self.type_to_loader_dict[name]["params"] # type: ignore
+ base_classes = ["function"]
+ if node := customs.get_custom_nodes("tools").get(tool_type):
+ return node
+ elif tool_type in FILE_TOOLS:
+ params = self.type_to_loader_dict[name]["params"] # type: ignore
+ base_classes += [name]
+ elif tool_type in OTHER_TOOLS:
+ tool_dict = build_template_from_class(tool_type, OTHER_TOOLS)
+ fields = tool_dict["template"]
+
+ # Pop unnecessary fields and add name
+ fields.pop("_type") # type: ignore
+ fields.pop("return_direct") # type: ignore
+ fields.pop("verbose") # type: ignore
+
+ tool_params = {
+ "name": fields.pop("name")["value"], # type: ignore
+ "description": fields.pop("description")["value"], # type: ignore
+ }
+
+ fields = [
+ TemplateField(name=name, field_type=field["type"], **field)
+ for name, field in fields.items() # type: ignore
+ ]
+ base_classes += tool_dict["base_classes"]
+
+ # Copy the field and add the name
+ for param in params:
+ field = TOOL_INPUTS.get(param, TOOL_INPUTS["str"]).copy()
+ field.name = param
+ if param == "aiosession":
+ field.show = False
+ field.required = False
+ fields.append(field)
+
+ template = Template(fields=fields, type_name=tool_type)
+
+ tool_params = {**tool_params, **self.type_to_loader_dict[name]["params"]}
+ return {
+ "template": util.format_dict(template.to_dict()),
+ **tool_params,
+ "base_classes": base_classes,
+ }
+
+ def to_list(self) -> List[str]:
+ """List all load tools"""
+
+ return list(self.type_to_loader_dict.keys())
+
+
+tool_creator = ToolCreator()
diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py
new file mode 100644
index 000000000..34890a684
--- /dev/null
+++ b/src/backend/langflow/interface/tools/constants.py
@@ -0,0 +1,60 @@
+from langchain.agents import Tool
+from langchain.agents.load_tools import (
+ _BASE_TOOLS,
+ _EXTRA_LLM_TOOLS,
+ _EXTRA_OPTIONAL_TOOLS,
+ _LLM_TOOLS,
+)
+from langchain.tools.bing_search.tool import BingSearchRun
+from langchain.tools.google_search.tool import GoogleSearchResults, GoogleSearchRun
+from langchain.tools.json.tool import JsonGetValueTool, JsonListKeysTool, JsonSpec
+from langchain.tools.python.tool import PythonAstREPLTool, PythonREPLTool
+from langchain.tools.requests.tool import (
+ RequestsDeleteTool,
+ RequestsGetTool,
+ RequestsPatchTool,
+ RequestsPostTool,
+ RequestsPutTool,
+)
+from langchain.tools.sql_database.tool import (
+ InfoSQLDatabaseTool,
+ ListSQLDatabaseTool,
+ QueryCheckerTool,
+ QuerySQLDataBaseTool,
+)
+from langchain.tools.wikipedia.tool import WikipediaQueryRun
+from langchain.tools.wolfram_alpha.tool import WolframAlphaQueryRun
+
+from langflow.interface.tools.custom import PythonFunction
+
+FILE_TOOLS = {"JsonSpec": JsonSpec}
+CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
+OTHER_TOOLS = {
+ "QuerySQLDataBaseTool": QuerySQLDataBaseTool,
+ "InfoSQLDatabaseTool": InfoSQLDatabaseTool,
+ "ListSQLDatabaseTool": ListSQLDatabaseTool,
+ "QueryCheckerTool": QueryCheckerTool,
+ "BingSearchRun": BingSearchRun,
+ "GoogleSearchRun": GoogleSearchRun,
+ "GoogleSearchResults": GoogleSearchResults,
+ "JsonListKeysTool": JsonListKeysTool,
+ "JsonGetValueTool": JsonGetValueTool,
+ "PythonREPLTool": PythonREPLTool,
+ "PythonAstREPLTool": PythonAstREPLTool,
+ "RequestsGetTool": RequestsGetTool,
+ "RequestsPostTool": RequestsPostTool,
+ "RequestsPatchTool": RequestsPatchTool,
+ "RequestsPutTool": RequestsPutTool,
+ "RequestsDeleteTool": RequestsDeleteTool,
+ "WikipediaQueryRun": WikipediaQueryRun,
+ "WolframAlphaQueryRun": WolframAlphaQueryRun,
+}
+ALL_TOOLS_NAMES = {
+ **_BASE_TOOLS,
+ **_LLM_TOOLS, # type: ignore
+ **{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore
+ **{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
+ **CUSTOM_TOOLS,
+ **FILE_TOOLS, # type: ignore
+ **OTHER_TOOLS,
+}
diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py
new file mode 100644
index 000000000..4c641f388
--- /dev/null
+++ b/src/backend/langflow/interface/tools/custom.py
@@ -0,0 +1,37 @@
+from typing import Callable, Optional
+
+from pydantic import BaseModel, validator
+
+from langflow.utils import validate
+
+
+class Function(BaseModel):
+ code: str
+ function: Optional[Callable] = None
+ imports: Optional[str] = None
+
+ # Eval code and store the function
+ def __init__(self, **data):
+ super().__init__(**data)
+
+ # Validate the function
+ @validator("code")
+ def validate_func(cls, v):
+ try:
+ validate.eval_function(v)
+ except Exception as e:
+ raise e
+
+ return v
+
+ def get_function(self):
+ """Get the function"""
+ function_name = validate.extract_function_name(self.code)
+
+ return validate.create_function(self.code, function_name)
+
+
+class PythonFunction(Function):
+ """Python function"""
+
+ code: str
diff --git a/src/backend/langflow/interface/tools/util.py b/src/backend/langflow/interface/tools/util.py
new file mode 100644
index 000000000..f1d66696a
--- /dev/null
+++ b/src/backend/langflow/interface/tools/util.py
@@ -0,0 +1,97 @@
+import ast
+import inspect
+from typing import Dict, Union
+
+from langchain.agents.tools import Tool
+
+
+def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
+ tree = ast.parse(inspect.getsource(func))
+
+ # Iterate over the statements in the abstract syntax tree
+ for node in ast.walk(tree):
+ # Find the first return statement
+ if isinstance(node, ast.Return):
+ tool = node.value
+ if isinstance(tool, ast.Call):
+ if isinstance(tool.func, ast.Name) and tool.func.id == "Tool":
+ if tool.keywords:
+ tool_params = {}
+ for keyword in tool.keywords:
+ if keyword.arg == "name":
+ tool_params["name"] = ast.literal_eval(keyword.value)
+ elif keyword.arg == "description":
+ tool_params["description"] = ast.literal_eval(
+ keyword.value
+ )
+
+ return tool_params
+ return {
+ "name": ast.literal_eval(tool.args[0]),
+ "description": ast.literal_eval(tool.args[2]),
+ }
+ #
+ else:
+ # get the class object from the return statement
+ try:
+ class_obj = eval(
+ compile(ast.Expression(tool), "", "eval")
+ )
+ except Exception:
+ return None
+
+ return {
+ "name": getattr(class_obj, "name"),
+ "description": getattr(class_obj, "description"),
+ }
+ # Return None if no return statement was found
+ return None
+
+
+def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
+ tree = ast.parse(inspect.getsource(cls))
+
+ tool_params = {}
+
+ # Iterate over the statements in the abstract syntax tree
+ for node in ast.walk(tree):
+ if isinstance(node, ast.ClassDef):
+ # Find the class definition and look for methods
+ for stmt in node.body:
+ if isinstance(stmt, ast.FunctionDef) and stmt.name == "__init__":
+ # There are no assignment statements in the __init__ method
+ # So we need to get the params from the function definition
+ for arg in stmt.args.args:
+ if arg.arg == "name":
+ # It should be the name of the class
+ tool_params[arg.arg] = cls.__name__
+ elif arg.arg == "self":
+ continue
+ # If there is no default value, set it to an empty string
+ else:
+ try:
+ annotation = ast.literal_eval(arg.annotation) # type: ignore
+ tool_params[arg.arg] = annotation
+ except ValueError:
+ tool_params[arg.arg] = ""
+ # Get the attribute name and the annotation
+ elif cls != Tool and isinstance(stmt, ast.AnnAssign):
+ # Get the attribute name and the annotation
+ tool_params[stmt.target.id] = "" # type: ignore
+
+ return tool_params
+
+
+def get_tool_params(tool, **kwargs) -> Dict:
+ # Parse the function code into an abstract syntax tree
+ # Define if it is a function or a class
+ if inspect.isfunction(tool):
+ return get_func_tool_params(tool, **kwargs) or {}
+ elif inspect.isclass(tool):
+ # Get the parameters necessary to
+ # instantiate the class
+
+ return get_class_tool_params(tool, **kwargs) or {}
+
+ else:
+ raise ValueError("Tool must be a function or class.")
diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py
index 57f3a2578..085537756 100644
--- a/src/backend/langflow/interface/types.py
+++ b/src/backend/langflow/interface/types.py
@@ -1,12 +1,23 @@
-from langflow.interface.listing import list_type
-from langflow.interface.signature import get_signature
+from langflow.interface.agents.base import agent_creator
+from langflow.interface.chains.base import chain_creator
+from langflow.interface.document_loaders.base import documentloader_creator
+from langflow.interface.embeddings.base import embedding_creator
+from langflow.interface.llms.base import llm_creator
+from langflow.interface.memories.base import memory_creator
+from langflow.interface.prompts.base import prompt_creator
+from langflow.interface.text_splitters.base import textsplitter_creator
+from langflow.interface.toolkits.base import toolkits_creator
+from langflow.interface.tools.base import tool_creator
+from langflow.interface.utilities.base import utility_creator
+from langflow.interface.vector_store.base import vectorstore_creator
+from langflow.interface.wrappers.base import wrapper_creator
def get_type_list():
"""Get a list of all langchain types"""
all_types = build_langchain_types_dict()
- all_types.pop("tools")
+ # all_types.pop("tools")
for key, value in all_types.items():
all_types[key] = [item["template"]["_type"] for item in value.values()]
@@ -14,23 +25,30 @@ def get_type_list():
return all_types
-def build_langchain_types_dict():
+def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
"""Build a dictionary of all langchain types"""
- return {
- "chains": {
- chain: get_signature(chain, "chains") for chain in list_type("chains")
- },
- "agents": {
- agent: get_signature(agent, "agents") for agent in list_type("agents")
- },
- "prompts": {
- prompt: get_signature(prompt, "prompts") for prompt in list_type("prompts")
- },
- "llms": {llm: get_signature(llm, "llms") for llm in list_type("llms")},
- "memories": {
- memory: get_signature(memory, "memories")
- for memory in list_type("memories")
- },
- "tools": {tool: get_signature(tool, "tools") for tool in list_type("tools")},
- }
+ all_types = {}
+
+ creators = [
+ chain_creator,
+ agent_creator,
+ prompt_creator,
+ llm_creator,
+ memory_creator,
+ tool_creator,
+ toolkits_creator,
+ wrapper_creator,
+ embedding_creator,
+ vectorstore_creator,
+ documentloader_creator,
+ textsplitter_creator,
+ utility_creator,
+ ]
+
+ all_types = {}
+ for creator in creators:
+ created_types = creator.to_dict()
+ if created_types[creator.type_name].values():
+ all_types.update(created_types)
+ return all_types
diff --git a/src/backend/langflow/interface/utilities/__init__.py b/src/backend/langflow/interface/utilities/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py
new file mode 100644
index 000000000..e60e344ad
--- /dev/null
+++ b/src/backend/langflow/interface/utilities/base.py
@@ -0,0 +1,39 @@
+from typing import Dict, List, Optional
+
+from langflow.custom.customs import get_custom_nodes
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import utility_type_to_cls_dict
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class UtilityCreator(LangChainTypeCreator):
+ type_name: str = "utilities"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ return utility_type_to_cls_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a utility."""
+ try:
+ if name in get_custom_nodes(self.type_name).keys():
+ return get_custom_nodes(self.type_name)[name]
+ return build_template_from_class(name, utility_type_to_cls_dict)
+ except ValueError as exc:
+ raise ValueError(f"Utility {name} not found") from exc
+
+ except AttributeError as exc:
+ logger.error(f"Utility {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ utility.__name__
+ for utility in self.type_to_loader_dict.values()
+ if utility.__name__ in settings.utilities or settings.dev
+ ]
+
+
+utility_creator = UtilityCreator()
diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py
new file mode 100644
index 000000000..b3b154790
--- /dev/null
+++ b/src/backend/langflow/interface/utils.py
@@ -0,0 +1,22 @@
+import json
+import os
+
+import yaml
+
+
+def load_file_into_dict(file_path: str) -> dict:
+ if not os.path.exists(file_path):
+ raise FileNotFoundError(f"File not found: {file_path}")
+
+ file_extension = os.path.splitext(file_path)[1].lower()
+
+ if file_extension == ".json":
+ with open(file_path, "r") as json_file:
+ data = json.load(json_file)
+ elif file_extension in [".yaml", ".yml"]:
+ with open(file_path, "r") as yaml_file:
+ data = yaml.safe_load(yaml_file)
+ else:
+ raise ValueError("Unsupported file type. Please provide a JSON or YAML file.")
+
+ return data
diff --git a/src/backend/langflow/interface/vector_store/__init__.py b/src/backend/langflow/interface/vector_store/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/vector_store/base.py b/src/backend/langflow/interface/vector_store/base.py
new file mode 100644
index 000000000..7fca2ba0c
--- /dev/null
+++ b/src/backend/langflow/interface/vector_store/base.py
@@ -0,0 +1,55 @@
+from typing import Dict, List, Optional
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.custom_lists import vectorstores_type_to_cls_dict
+from langflow.settings import settings
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class VectorstoreCreator(LangChainTypeCreator):
+ type_name: str = "vectorstores"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ return vectorstores_type_to_cls_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ """Get the signature of a vector store."""
+ try:
+ signature = build_template_from_class(name, vectorstores_type_to_cls_dict)
+
+ # TODO: Use FrontendNode class to build the signature
+ signature["template"] = {
+ "documents": {
+ "type": "TextSplitter",
+ "required": True,
+ "show": True,
+ "name": "documents",
+ "display_name": "Text Splitter",
+ },
+ "embedding": {
+ "type": "Embeddings",
+ "required": True,
+ "show": True,
+ "name": "embedding",
+ "display_name": "Embedding",
+ },
+ }
+ return signature
+
+ except ValueError as exc:
+ raise ValueError(f"Vector Store {name} not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Vector Store {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return [
+ vectorstore
+ for vectorstore in self.type_to_loader_dict.keys()
+ if vectorstore in settings.vectorstores or settings.dev
+ ]
+
+
+vectorstore_creator = VectorstoreCreator()
diff --git a/src/backend/langflow/interface/wrappers/__init__.py b/src/backend/langflow/interface/wrappers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/langflow/interface/wrappers/base.py
new file mode 100644
index 000000000..f5773d07a
--- /dev/null
+++ b/src/backend/langflow/interface/wrappers/base.py
@@ -0,0 +1,34 @@
+from typing import Dict, List, Optional
+
+from langchain import requests
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class
+
+
+class WrapperCreator(LangChainTypeCreator):
+ type_name: str = "wrappers"
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = {
+ wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]
+ }
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ try:
+ return build_template_from_class(name, self.type_to_loader_dict)
+ except ValueError as exc:
+ raise ValueError("Wrapper not found") from exc
+ except AttributeError as exc:
+ logger.error(f"Wrapper {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return list(self.type_to_loader_dict.keys())
+
+
+wrapper_creator = WrapperCreator()
diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py
index a2a02465e..176e46236 100644
--- a/src/backend/langflow/main.py
+++ b/src/backend/langflow/main.py
@@ -2,8 +2,7 @@ from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langflow.api.endpoints import router as endpoints_router
-from langflow.api.list_endpoints import router as list_router
-from langflow.api.signature import router as signatures_router
+from langflow.api.validate import router as validate_router
def create_app():
@@ -23,8 +22,7 @@ def create_app():
)
app.include_router(endpoints_router)
- app.include_router(list_router)
- app.include_router(signatures_router)
+ app.include_router(validate_router)
return app
diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py
index f4dd4ae30..48aa5939d 100644
--- a/src/backend/langflow/settings.py
+++ b/src/backend/langflow/settings.py
@@ -1,29 +1,51 @@
import os
-from typing import List, Optional
+from typing import List
import yaml
-from pydantic import BaseSettings, Field, root_validator
+from pydantic import BaseSettings, root_validator
class Settings(BaseSettings):
- chains: Optional[List[str]] = Field(...)
- agents: Optional[List[str]] = Field(...)
- prompts: Optional[List[str]] = Field(...)
- llms: Optional[List[str]] = Field(...)
- tools: Optional[List[str]] = Field(...)
- memories: Optional[List[str]] = Field(...)
- dev: bool = Field(...)
+ chains: List[str] = []
+ agents: List[str] = []
+ prompts: List[str] = []
+ llms: List[str] = []
+ tools: List[str] = []
+ memories: List[str] = []
+ embeddings: List[str] = []
+ vectorstores: List[str] = []
+ documentloaders: List[str] = []
+ wrappers: List[str] = []
+ toolkits: List[str] = []
+ textsplitters: List[str] = []
+ utilities: List[str] = []
+ dev: bool = False
class Config:
validate_assignment = True
+ extra = "ignore"
- @root_validator
+ @root_validator(allow_reuse=True)
def validate_lists(cls, values):
for key, value in values.items():
if key != "dev" and not value:
values[key] = []
return values
+ def update_from_yaml(self, file_path: str):
+ new_settings = load_settings_from_yaml(file_path)
+ self.chains = new_settings.chains or []
+ self.agents = new_settings.agents or []
+ self.prompts = new_settings.prompts or []
+ self.llms = new_settings.llms or []
+ self.tools = new_settings.tools or []
+ self.memories = new_settings.memories or []
+ self.wrappers = new_settings.wrappers or []
+ self.toolkits = new_settings.toolkits or []
+ self.textsplitters = new_settings.textsplitters or []
+ self.utilities = new_settings.utilities or []
+ self.dev = new_settings.dev or False
+
def save_settings_to_yaml(settings: Settings, file_path: str):
with open(file_path, "w") as f:
@@ -41,9 +63,8 @@ def load_settings_from_yaml(file_path: str) -> Settings:
with open(file_path, "r") as f:
settings_dict = yaml.safe_load(f)
- a = Settings.parse_obj(settings_dict)
- return a
+ return Settings(**settings_dict)
settings = load_settings_from_yaml("config.yaml")
diff --git a/src/backend/langflow/template/__init__.py b/src/backend/langflow/template/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/template/base.py b/src/backend/langflow/template/base.py
new file mode 100644
index 000000000..ecadde108
--- /dev/null
+++ b/src/backend/langflow/template/base.py
@@ -0,0 +1,234 @@
+from abc import ABC
+from typing import Any, Callable, Dict, Optional, Union
+
+from pydantic import BaseModel
+
+from langflow.template.constants import FORCE_SHOW_FIELDS
+from langflow.utils import constants
+
+
+class TemplateFieldCreator(BaseModel, ABC):
+ field_type: str = "str"
+ required: bool = False
+ placeholder: str = ""
+ is_list: bool = False
+ show: bool = True
+ multiline: bool = False
+ value: Any = None
+ suffixes: list[str] = []
+ fileTypes: list[str] = []
+ file_types: list[str] = []
+ content: Union[str, None] = None
+ password: bool = False
+ options: list[str] = []
+ name: str = ""
+ display_name: Optional[str] = None
+
+ def to_dict(self):
+ result = self.dict()
+        # Remove keys whose value is None or an empty list
+ for key in list(result.keys()):
+ if result[key] is None or result[key] == []:
+ del result[key]
+ result["type"] = result.pop("field_type")
+ result["list"] = result.pop("is_list")
+
+ if result.get("file_types"):
+ result["fileTypes"] = result.pop("file_types")
+
+ if self.field_type == "file":
+ result["content"] = self.content
+ return result
+
+ def process_field(
+ self, key: str, value: Dict[str, Any], name: Optional[str] = None
+ ) -> None:
+ _type = value["type"]
+
+ # Remove 'Optional' wrapper
+ if "Optional" in _type:
+ _type = _type.replace("Optional[", "")[:-1]
+
+ # Check for list type
+ if "List" in _type:
+ _type = _type.replace("List[", "")[:-1]
+ self.is_list = True
+
+ # Replace 'Mapping' with 'dict'
+ if "Mapping" in _type:
+ _type = _type.replace("Mapping", "dict")
+
+ # Change type from str to Tool
+ self.field_type = "Tool" if key in {"allowed_tools"} else self.field_type
+
+ self.field_type = "int" if key in {"max_value_length"} else self.field_type
+
+ # Show or not field
+ self.show = bool(
+ (self.required and key not in ["input_variables"])
+ or key in FORCE_SHOW_FIELDS
+ or "api_key" in key
+ )
+
+ # Add password field
+ self.password = any(
+ text in key.lower() for text in {"password", "token", "api", "key"}
+ )
+
+        # Add multiline
+ self.multiline = key in {
+ "suffix",
+ "prefix",
+ "template",
+ "examples",
+ "code",
+ "headers",
+ }
+
+ # Replace dict type with str
+ if "dict" in self.field_type.lower():
+ self.field_type = "code"
+
+ if key == "dict_":
+ self.field_type = "file"
+ self.suffixes = [".json", ".yaml", ".yml"]
+ self.file_types = ["json", "yaml", "yml"]
+
+ # Replace default value with actual value
+ if "default" in value:
+ self.value = value["default"]
+
+ if key == "headers":
+ self.value = """{'Authorization':
+ 'Bearer '}"""
+
+ # Add options to openai
+ if name == "OpenAI" and key == "model_name":
+ self.options = constants.OPENAI_MODELS
+ self.is_list = True
+ elif name == "ChatOpenAI" and key == "model_name":
+ self.options = constants.CHAT_OPENAI_MODELS
+ self.is_list = True
+
+
+class TemplateField(TemplateFieldCreator):
+ pass
+
+
+class Template(BaseModel):
+ type_name: str
+ fields: list[TemplateField]
+
+ def process_fields(
+ self,
+ name: Optional[str] = None,
+ format_field_func: Union[Callable, None] = None,
+ ):
+ if format_field_func:
+ for field in self.fields:
+ format_field_func(field, name)
+
+ def to_dict(self, format_field_func=None):
+ self.process_fields(self.type_name, format_field_func)
+ result = {field.name: field.to_dict() for field in self.fields}
+ result["_type"] = self.type_name # type: ignore
+ return result
+
+
+class FrontendNode(BaseModel):
+ template: Template
+ description: str
+ base_classes: list
+ name: str = ""
+
+ def to_dict(self):
+ return {
+ self.name: {
+ "template": self.template.to_dict(self.format_field),
+ "description": self.description,
+ "base_classes": self.base_classes,
+ }
+ }
+
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ key = field.name
+ value = field.to_dict()
+ _type = value["type"]
+
+ # Remove 'Optional' wrapper
+ if "Optional" in _type:
+ _type = _type.replace("Optional[", "")[:-1]
+
+ # Check for list type
+ if "List" in _type:
+ _type = _type.replace("List[", "")[:-1]
+ field.is_list = True
+
+ # Replace 'Mapping' with 'dict'
+ if "Mapping" in _type:
+ _type = _type.replace("Mapping", "dict")
+
+ # Change type from str to Tool
+ field.field_type = "Tool" if key in {"allowed_tools"} else field.field_type
+
+ field.field_type = "int" if key in {"max_value_length"} else field.field_type
+
+ # Show or not field
+ field.show = bool(
+ (field.required and key not in ["input_variables"])
+ or key in FORCE_SHOW_FIELDS
+ or "api" in key
+ or ("key" in key and "input" not in key and "output" not in key)
+ )
+
+ # Add password field
+ field.password = (
+ any(text in key.lower() for text in {"password", "token", "api", "key"})
+ and field.show
+ )
+
+        # Add multiline
+ field.multiline = key in {
+ "suffix",
+ "prefix",
+ "template",
+ "examples",
+ "code",
+ "headers",
+ }
+
+ # Replace dict type with str
+ if "dict" in field.field_type.lower():
+ field.field_type = "code"
+
+ if key == "dict_":
+ field.field_type = "file"
+ field.suffixes = [".json", ".yaml", ".yml"]
+ field.file_types = ["json", "yaml", "yml"]
+
+ # Replace default value with actual value
+ if "default" in value:
+ field.value = value["default"]
+
+ if key == "headers":
+ field.value = """{'Authorization':
+ 'Bearer '}"""
+
+ # Add options to openai
+ if name == "OpenAI" and key == "model_name":
+ field.options = constants.OPENAI_MODELS
+ field.is_list = True
+ elif name == "ChatOpenAI":
+ if key == "model_name":
+ field.options = constants.CHAT_OPENAI_MODELS
+ field.is_list = True
+ if "api_key" in key and "OpenAI" in str(name):
+ field.display_name = "OpenAI API Key"
+ field.required = False
+ if field.value is None:
+ field.value = ""
+            # If the field name contains both "api" and "key", it might be an API key;
+            # mark it as not required so it can be supplied another way (e.g. env vars).
+ if "api" in key.lower() and "key" in key.lower():
+ field.required = False
diff --git a/src/backend/langflow/template/constants.py b/src/backend/langflow/template/constants.py
new file mode 100644
index 000000000..ae08d3691
--- /dev/null
+++ b/src/backend/langflow/template/constants.py
@@ -0,0 +1,32 @@
+FORCE_SHOW_FIELDS = [
+ "allowed_tools",
+ "memory",
+ "prefix",
+ "examples",
+ "temperature",
+ "model_name",
+ "headers",
+ "max_value_length",
+ "max_tokens",
+]
+
+DEFAULT_PROMPT = """
+I want you to act as a naming consultant for new companies.
+
+Here are some examples of good company names:
+
+- search engine, Google
+- social media, Facebook
+- video sharing, YouTube
+
+The name should be short, catchy and easy to remember.
+
+What is a good name for a company that makes {product}?
+"""
+
+SYSTEM_PROMPT = """
+You are a helpful assistant that talks casually about life in general.
+You are a good listener and you can talk about anything.
+"""
+
+HUMAN_PROMPT = "{input}"
diff --git a/src/backend/langflow/template/fields.py b/src/backend/langflow/template/fields.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/template/nodes.py b/src/backend/langflow/template/nodes.py
new file mode 100644
index 000000000..b174a9363
--- /dev/null
+++ b/src/backend/langflow/template/nodes.py
@@ -0,0 +1,461 @@
+from typing import Optional
+
+from langchain.agents import loading
+from langchain.agents.mrkl import prompt
+
+from langflow.template.base import FrontendNode, Template, TemplateField
+from langflow.template.constants import DEFAULT_PROMPT, HUMAN_PROMPT, SYSTEM_PROMPT
+from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION
+
+NON_CHAT_AGENTS = {
+ agent_type: agent_class
+ for agent_type, agent_class in loading.AGENT_TO_CLASS.items()
+ if "chat" not in agent_type.value
+}
+
+
+class BasePromptFrontendNode(FrontendNode):
+ name: str
+ template: Template
+ description: str
+ base_classes: list[str]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class ZeroShotPromptNode(BasePromptFrontendNode):
+ name: str = "ZeroShotPrompt"
+ template: Template = Template(
+ type_name="zero_shot",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=False,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=True,
+ value=prompt.PREFIX,
+ name="prefix",
+ ),
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=True,
+ value=prompt.SUFFIX,
+ name="suffix",
+ ),
+ TemplateField(
+ field_type="str",
+ required=False,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=True,
+ value=prompt.FORMAT_INSTRUCTIONS,
+ name="format_instructions",
+ ),
+ ],
+ )
+ description: str = "Prompt template for Zero Shot Agent."
+ base_classes: list[str] = ["BasePromptTemplate"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class PromptTemplateNode(FrontendNode):
+ name: str = "PromptTemplate"
+ template: Template
+ description: str
+ base_classes: list[str] = ["BasePromptTemplate"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class PythonFunctionNode(FrontendNode):
+ name: str = "PythonFunction"
+ template: Template = Template(
+ type_name="python_function",
+ fields=[
+ TemplateField(
+ field_type="code",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ value=DEFAULT_PYTHON_FUNCTION,
+ name="code",
+ )
+ ],
+ )
+ description: str = "Python function to be executed."
+ base_classes: list[str] = ["function"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class ToolNode(FrontendNode):
+ name: str = "Tool"
+ template: Template = Template(
+ type_name="Tool",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=True,
+ value="",
+ name="name",
+ ),
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=True,
+ value="",
+ name="description",
+ ),
+ TemplateField(
+ name="func",
+ field_type="function",
+ required=True,
+ is_list=False,
+ show=True,
+ multiline=True,
+ ),
+ TemplateField(
+ field_type="bool",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=False,
+ value=False,
+ name="return_direct",
+ ),
+ ],
+ )
+ description: str = "Tool to be used in the flow."
+ base_classes: list[str] = ["Tool"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class JsonAgentNode(FrontendNode):
+ name: str = "JsonAgent"
+ template: Template = Template(
+ type_name="json_agent",
+ fields=[
+ TemplateField(
+ field_type="BaseToolkit",
+ required=True,
+ show=True,
+ name="toolkit",
+ ),
+ TemplateField(
+ field_type="BaseLanguageModel",
+ required=True,
+ show=True,
+ name="llm",
+ ),
+ ],
+ )
+ description: str = """Construct a json agent from an LLM and tools."""
+ base_classes: list[str] = ["AgentExecutor"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class InitializeAgentNode(FrontendNode):
+ name: str = "initialize_agent"
+ template: Template = Template(
+ type_name="initailize_agent",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=True,
+ is_list=True,
+ show=True,
+ multiline=False,
+ options=list(NON_CHAT_AGENTS.keys()),
+ value=list(NON_CHAT_AGENTS.keys())[0],
+ name="agent",
+ ),
+ TemplateField(
+ field_type="BaseChatMemory",
+ required=False,
+ show=True,
+ name="memory",
+ ),
+ TemplateField(
+ field_type="Tool",
+ required=False,
+ show=True,
+ name="tools",
+ is_list=True,
+ ),
+ TemplateField(
+ field_type="BaseLanguageModel",
+ required=True,
+ show=True,
+ name="llm",
+ ),
+ ],
+ )
+ description: str = """Construct a json agent from an LLM and tools."""
+ base_classes: list[str] = ["AgentExecutor"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ # do nothing and don't return anything
+ pass
+
+
+class CSVAgentNode(FrontendNode):
+ name: str = "CSVAgent"
+ template: Template = Template(
+ type_name="csv_agent",
+ fields=[
+ TemplateField(
+ field_type="file",
+ required=True,
+ show=True,
+ name="path",
+ value="",
+ suffixes=[".csv"],
+ fileTypes=["csv"],
+ ),
+ TemplateField(
+ field_type="BaseLanguageModel",
+ required=True,
+ show=True,
+ name="llm",
+ ),
+ ],
+ )
+ description: str = """Construct a json agent from a CSV and tools."""
+ base_classes: list[str] = ["AgentExecutor"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class SQLDatabaseNode(FrontendNode):
+ name: str = "SQLDatabase"
+ template: Template = Template(
+ type_name="sql_database",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=True,
+ is_list=False,
+ show=True,
+ multiline=False,
+ value="",
+ name="uri",
+ ),
+ ],
+ )
+ description: str = """SQLAlchemy wrapper around a database."""
+ base_classes: list[str] = ["SQLDatabase"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class VectorStoreAgentNode(FrontendNode):
+ name: str = "VectorStoreAgent"
+ template: Template = Template(
+ type_name="vectorstore_agent",
+ fields=[
+ TemplateField(
+ field_type="VectorStoreInfo",
+ required=True,
+ show=True,
+ name="vectorstoreinfo",
+ display_name="Vector Store Info",
+ ),
+ TemplateField(
+ field_type="BaseLanguageModel",
+ required=True,
+ show=True,
+ name="llm",
+ display_name="LLM",
+ ),
+ ],
+ )
+ description: str = """Construct an agent from a Vector Store."""
+ base_classes: list[str] = ["AgentExecutor"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class VectorStoreRouterAgentNode(FrontendNode):
+ name: str = "VectorStoreRouterAgent"
+ template: Template = Template(
+ type_name="vectorstorerouter_agent",
+ fields=[
+ TemplateField(
+ field_type="VectorStoreRouterToolkit",
+ required=True,
+ show=True,
+ name="vectorstoreroutertoolkit",
+ display_name="Vector Store Router Toolkit",
+ ),
+ TemplateField(
+ field_type="BaseLanguageModel",
+ required=True,
+ show=True,
+ name="llm",
+ display_name="LLM",
+ ),
+ ],
+ )
+ description: str = """Construct an agent from a Vector Store Router."""
+ base_classes: list[str] = ["AgentExecutor"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class SQLAgentNode(FrontendNode):
+ name: str = "SQLAgent"
+ template: Template = Template(
+ type_name="sql_agent",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=False,
+ value="",
+ name="database_uri",
+ ),
+ TemplateField(
+ field_type="BaseLanguageModel",
+ required=True,
+ show=True,
+ name="llm",
+ display_name="LLM",
+ ),
+ ],
+ )
+ description: str = """Construct an agent from a Vector Store Router."""
+ base_classes: list[str] = ["AgentExecutor"]
+
+ def to_dict(self):
+ return super().to_dict()
+
+
+class PromptFrontendNode(FrontendNode):
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ # if field.field_type == "StringPromptTemplate"
+ # change it to str
+ PROMPT_FIELDS = [
+ "template",
+ "suffix",
+ "prefix",
+ "examples",
+ ]
+ if field.field_type == "StringPromptTemplate" and "Message" in str(name):
+ field.field_type = "prompt"
+ field.multiline = True
+ field.value = HUMAN_PROMPT if "Human" in field.name else SYSTEM_PROMPT
+ if field.name == "template" and field.value == "":
+ field.value = DEFAULT_PROMPT
+
+ if field.name in PROMPT_FIELDS:
+ field.field_type = "prompt"
+
+ if (
+ "Union" in field.field_type
+ and "BaseMessagePromptTemplate" in field.field_type
+ ):
+ field.field_type = "BaseMessagePromptTemplate"
+
+ # All prompt fields should be password=False
+ field.password = False
+
+
+class MemoryFrontendNode(FrontendNode):
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ FrontendNode.format_field(field, name)
+
+ if not isinstance(field.value, str):
+ field.value = None
+ if field.name == "k":
+ field.required = True
+ field.show = True
+ field.field_type = "int"
+ field.value = 10
+ field.display_name = "Memory Size"
+ field.password = False
+
+
+class ChainFrontendNode(FrontendNode):
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ FrontendNode.format_field(field, name)
+
+ if "key" in field.name:
+ field.password = False
+ field.show = False
+ if field.name in ["input_key", "output_key"]:
+ field.required = True
+ field.show = True
+ # Separated for possible future changes
+ if field.name == "prompt":
+ # if no prompt is provided, use the default prompt
+ field.required = False
+ field.show = True
+
+
+class LLMFrontendNode(FrontendNode):
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ display_names_dict = {
+ "huggingfacehub_api_token": "HuggingFace Hub API Token",
+ }
+ FrontendNode.format_field(field, name)
+ SHOW_FIELDS = ["repo_id", "task", "model_kwargs"]
+ if field.name in SHOW_FIELDS:
+ field.show = True
+
+ if "api" in field.name and ("key" in field.name or "token" in field.name):
+ field.password = True
+ field.show = True
+ # Required should be False to support
+ # loading the API key from environment variables
+ field.required = False
+
+ if field.name == "task":
+ field.required = True
+ field.show = True
+ field.is_list = True
+ field.options = ["text-generation", "text2text-generation"]
+
+ if display_name := display_names_dict.get(field.name):
+ field.display_name = display_name
+ if field.name == "model_kwargs":
+ field.field_type = "code"
diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py
index 73a50ce40..2d101ab98 100644
--- a/src/backend/langflow/utils/constants.py
+++ b/src/backend/langflow/utils/constants.py
@@ -6,3 +6,10 @@ OPENAI_MODELS = [
"text-ada-001",
]
CHAT_OPENAI_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]
+
+
+DEFAULT_PYTHON_FUNCTION = """
+def python_function(text: str) -> str:
+ \"\"\"This is a default python function that returns the input text\"\"\"
+ return text
+"""
diff --git a/src/backend/langflow/utils/logger.py b/src/backend/langflow/utils/logger.py
new file mode 100644
index 000000000..b70a451d4
--- /dev/null
+++ b/src/backend/langflow/utils/logger.py
@@ -0,0 +1,30 @@
+import logging
+from pathlib import Path
+
+from rich.logging import RichHandler
+
+logger = logging.getLogger("langflow")
+
+
+def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore
+ log_format = "%(asctime)s - %(levelname)s - %(message)s"
+ log_level_value = getattr(logging, log_level.upper(), logging.INFO)
+
+ logging.basicConfig(
+ level=log_level_value,
+ format=log_format,
+ datefmt="[%X]",
+ handlers=[RichHandler(rich_tracebacks=True)],
+ )
+
+ if log_file:
+ log_file = Path(log_file)
+ log_file.parent.mkdir(parents=True, exist_ok=True)
+
+ file_handler = logging.FileHandler(log_file)
+ file_handler.setFormatter(logging.Formatter(log_format))
+ logger.addHandler(file_handler)
+
+ logger.info(f"Logger set up with log level: {log_level_value}({log_level})")
+ if log_file:
+ logger.info(f"Log file: {log_file}")
diff --git a/src/backend/langflow/utils/payload.py b/src/backend/langflow/utils/payload.py
index e7d38139e..cac23a0d6 100644
--- a/src/backend/langflow/utils/payload.py
+++ b/src/backend/langflow/utils/payload.py
@@ -1,5 +1,6 @@
import contextlib
import re
+from typing import Dict
def extract_input_variables(nodes):
@@ -27,48 +28,63 @@ def extract_input_variables(nodes):
return nodes
-def get_root_node(nodes, edges):
+def get_root_node(graph):
"""
Returns the root node of the template.
"""
- incoming_edges = {edge["source"] for edge in edges}
- return next((node for node in nodes if node["id"] not in incoming_edges), None)
+ incoming_edges = {edge.source for edge in graph.edges}
+
+ if not incoming_edges and len(graph.nodes) == 1:
+ return graph.nodes[0]
+
+ return next((node for node in graph.nodes if node not in incoming_edges), None)
-def build_json(root, nodes, edges):
- """
- Builds a json from the nodes and edges
- """
- edge_ids = [edge["source"] for edge in edges if edge["target"] == root["id"]]
- local_nodes = [node for node in nodes if node["id"] in edge_ids]
+def build_json(root, graph) -> Dict:
+ if "node" not in root.data:
+ # If the root node has no "node" key, then it has only one child,
+ # which is the target of the single outgoing edge
+ edge = root.edges[0]
+ local_nodes = [edge.target]
+ else:
+ # Otherwise, find all children whose type matches the type
+ # specified in the template
+ node_type = root.node_type
+ local_nodes = graph.get_nodes_with_target(root)
- if "node" not in root["data"]:
- return build_json(local_nodes[0], nodes, edges)
-
- final_dict = root["data"]["node"]["template"].copy()
+ if len(local_nodes) == 1:
+ return build_json(local_nodes[0], graph)
+ # Build a dictionary from the template
+ template = root.data["node"]["template"]
+ final_dict = template.copy()
for key, value in final_dict.items():
if key == "_type":
continue
- module_type = value["type"]
+ node_type = value["type"]
if "value" in value and value["value"] is not None:
+ # If the value is specified, use it
value = value["value"]
- elif "dict" in module_type:
+ elif "dict" in node_type:
+ # If the value is a dictionary, create an empty dictionary
value = {}
else:
+ # Otherwise, recursively build the child nodes
children = []
- for c in local_nodes:
- module_types = [c["data"]["type"]]
- if "node" in c["data"]:
- module_types += c["data"]["node"]["base_classes"]
- if module_type in module_types:
- children.append(c)
+ for local_node in local_nodes:
+ node_children = graph.get_children_by_node_type(local_node, node_type)
+ children.extend(node_children)
if value["required"] and not children:
- raise ValueError(f"No child with type {module_type} found")
- values = [build_json(child, nodes, edges) for child in children]
- value = list(values) if value["list"] else next(iter(values), None)
+ raise ValueError(f"No child with type {node_type} found")
+ values = [build_json(child, graph) for child in children]
+ value = (
+ list(values)
+ if value["list"]
+ else next(iter(values), None) # type: ignore
+ )
final_dict[key] = value
+
return final_dict
diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py
index 4710114ed..b31a3bed1 100644
--- a/src/backend/langflow/utils/util.py
+++ b/src/backend/langflow/utils/util.py
@@ -1,20 +1,60 @@
-import ast
import importlib
import inspect
import re
from typing import Dict, Optional
-from langchain.agents.load_tools import (
- _BASE_TOOLS,
- _EXTRA_LLM_TOOLS,
- _EXTRA_OPTIONAL_TOOLS,
- _LLM_TOOLS,
-)
+from docstring_parser import parse # type: ignore
+from langflow.template.constants import FORCE_SHOW_FIELDS
from langflow.utils import constants
-def build_template_from_function(name: str, type_to_loader_dict: Dict):
+def build_template_from_parameters(
+ name: str, type_to_loader_dict: Dict, add_function: bool = False
+):
+ # Retrieve the function that matches the provided name
+ func = None
+ for _, v in type_to_loader_dict.items():
+ if v.__name__ == name:
+ func = v
+ break
+
+ if func is None:
+ raise ValueError(f"{name} not found")
+
+ # Process parameters
+ parameters = func.__annotations__
+ variables = {}
+ for param_name, param_type in parameters.items():
+ if param_name in ["return", "kwargs"]:
+ continue
+
+ variables[param_name] = {
+ "type": param_type.__name__,
+ "default": parameters[param_name].__repr_args__()[0][1],
+ # Op
+ "placeholder": "",
+ }
+
+ # Get the base classes of the return type
+ return_type = parameters.get("return")
+ base_classes = get_base_classes(return_type) if return_type else []
+ if add_function:
+ base_classes.append("function")
+
+ # Get the function's docstring
+ docs = inspect.getdoc(func) or ""
+
+ return {
+ "template": format_dict(variables, name),
+ "description": docs["Description"], # type: ignore
+ "base_classes": base_classes,
+ }
+
+
+def build_template_from_function(
+ name: str, type_to_loader_dict: Dict, add_function: bool = False
+):
classes = [
item.__annotations__["return"].__name__ for item in type_to_loader_dict.values()
]
@@ -27,52 +67,8 @@ def build_template_from_function(name: str, type_to_loader_dict: Dict):
if v.__annotations__["return"].__name__ == name:
_class = v.__annotations__["return"]
- docs = get_class_doc(_class)
-
- variables = {"_type": _type}
- for class_field_items, value in _class.__fields__.items():
- if class_field_items in ["callback_manager", "requests_wrapper"]:
- continue
- variables[class_field_items] = {}
- for name_, value_ in value.__repr_args__():
- if name_ == "default_factory":
- try:
- variables[class_field_items][
- "default"
- ] = get_default_factory(
- module=_class.__base__.__module__, function=value_
- )
- except Exception:
- variables[class_field_items]["default"] = None
- elif name_ not in ["name"]:
- variables[class_field_items][name_] = value_
-
- variables[class_field_items]["placeholder"] = (
- docs["Attributes"][class_field_items]
- if class_field_items in docs["Attributes"]
- else ""
- )
-
- return {
- "template": format_dict(variables, name),
- "description": docs["Description"],
- "base_classes": get_base_classes(_class),
- }
-
-
-def build_template_from_class(name: str, type_to_cls_dict: Dict):
- classes = [item.__name__ for item in type_to_cls_dict.values()]
-
- # Raise error if name is not in chains
- if name not in classes:
- raise ValueError(f"{name} not found.")
-
- for _type, v in type_to_cls_dict.items():
- if v.__name__ == name:
- _class = v
-
# Get the docstring
- docs = get_class_doc(_class)
+ docs = parse(_class.__doc__)
variables = {"_type": _type}
for class_field_items, value in _class.__fields__.items():
@@ -93,30 +89,97 @@ def build_template_from_class(name: str, type_to_cls_dict: Dict):
variables[class_field_items][name_] = value_
variables[class_field_items]["placeholder"] = (
- docs["Attributes"][class_field_items]
- if class_field_items in docs["Attributes"]
+ docs.params[class_field_items]
+ if class_field_items in docs.params
else ""
)
+ # Adding function to base classes to allow
+ # the output to be a function
+ base_classes = get_base_classes(_class)
+ if add_function:
+ base_classes.append("function")
return {
"template": format_dict(variables, name),
- "description": docs["Description"],
- "base_classes": get_base_classes(_class),
+ "description": docs.short_description or "",
+ "base_classes": base_classes,
+ }
+
+
+def build_template_from_class(
+ name: str, type_to_cls_dict: Dict, add_function: bool = False
+):
+ classes = [item.__name__ for item in type_to_cls_dict.values()]
+
+ # Raise error if name is not in chains
+ if name not in classes:
+ raise ValueError(f"{name} not found.")
+
+ for _type, v in type_to_cls_dict.items():
+ if v.__name__ == name:
+ _class = v
+
+ # Get the docstring
+ docs = parse(_class.__doc__)
+
+ variables = {"_type": _type}
+
+ if "__fields__" in _class.__dict__:
+ for class_field_items, value in _class.__fields__.items():
+ if class_field_items in ["callback_manager"]:
+ continue
+ variables[class_field_items] = {}
+ for name_, value_ in value.__repr_args__():
+ if name_ == "default_factory":
+ try:
+ variables[class_field_items][
+ "default"
+ ] = get_default_factory(
+ module=_class.__base__.__module__, function=value_
+ )
+ except Exception:
+ variables[class_field_items]["default"] = None
+ elif name_ not in ["name"]:
+ variables[class_field_items][name_] = value_
+
+ variables[class_field_items]["placeholder"] = (
+ docs.params[class_field_items]
+ if class_field_items in docs.params
+ else ""
+ )
+ base_classes = get_base_classes(_class)
+ # Adding function to base classes to allow
+ # the output to be a function
+ if add_function:
+ base_classes.append("function")
+ return {
+ "template": format_dict(variables, name),
+ "description": docs.short_description or "",
+ "base_classes": base_classes,
}
def get_base_classes(cls):
- bases = cls.__bases__
- if not bases:
- return []
- else:
+ """Get the base classes of a class.
+ These are used to determine the output of the nodes.
+ """
+ if bases := cls.__bases__:
result = []
for base in bases:
if any(type in base.__module__ for type in ["pydantic", "abc"]):
continue
result.append(base.__name__)
- result.extend(get_base_classes(base))
- return result
+ base_classes = get_base_classes(base)
+ # check if the base_classes are in the result
+ # if not, add them
+ for base_class in base_classes:
+ if base_class not in result:
+ result.append(base_class)
+ else:
+ result = [cls.__name__]
+ if not result:
+ result = [cls.__name__]
+ return list(set(result + [cls.__name__]))
def get_default_factory(module: str, function: str):
@@ -128,114 +191,6 @@ def get_default_factory(module: str, function: str):
return None
-def get_tools_dict(name: Optional[str] = None):
- """Get the tools dictionary."""
- tools = {
- **_BASE_TOOLS,
- **_LLM_TOOLS, # type: ignore
- **{k: v[0] for k, v in _EXTRA_LLM_TOOLS.items()}, # type: ignore
- **{k: v[0] for k, v in _EXTRA_OPTIONAL_TOOLS.items()},
- }
- return tools[name] if name else tools
-
-
-def get_tool_params(func, **kwargs):
- # Parse the function code into an abstract syntax tree
- tree = ast.parse(inspect.getsource(func))
-
- # Iterate over the statements in the abstract syntax tree
- for node in ast.walk(tree):
- # Find the first return statement
- if isinstance(node, ast.Return):
- tool = node.value
- if isinstance(tool, ast.Call):
- if tool.func.id == "Tool":
- if tool.keywords:
- tool_params = {}
- for keyword in tool.keywords:
- if keyword.arg == "name":
- tool_params["name"] = ast.literal_eval(keyword.value)
- elif keyword.arg == "description":
- tool_params["description"] = ast.literal_eval(
- keyword.value
- )
- return tool_params
- return {
- "name": ast.literal_eval(tool.args[0]),
- "description": ast.literal_eval(tool.args[2]),
- }
- else:
- # get the class object from the return statement
- try:
- class_obj = eval(
- compile(ast.Expression(tool), "", "eval")
- )
- except Exception:
- return None
-
- return {
- "name": getattr(class_obj, "name"),
- "description": getattr(class_obj, "description"),
- }
-
- # Return None if no return statement was found
- return None
-
-
-def get_class_doc(class_name):
- """
- Extracts information from the docstring of a given class.
-
- Args:
- class_name: the class to extract information from
-
- Returns:
- A dictionary containing the extracted information, with keys
- for 'Description', 'Parameters', 'Attributes', and 'Returns'.
- """
- # Template
- data = {
- "Description": "",
- "Parameters": {},
- "Attributes": {},
- "Example": [],
- "Returns": {},
- }
-
- # Get the class docstring
- docstring = class_name.__doc__
-
- if not docstring:
- return data
-
- # Parse the docstring to extract information
- lines = docstring.split("\n")
-
- current_section = "Description"
-
- for line in lines:
- line = line.strip()
-
- if not line:
- continue
-
- if (
- line.startswith(tuple(data.keys()))
- and len(line.split()) == 1
- and line.endswith(":")
- ):
- current_section = line[:-1]
- continue
-
- if current_section in ["Description", "Example"]:
- data[current_section] += line
- else:
- param, desc = line.split(":")
- data[current_section][param.strip()] = desc.strip()
-
- return data
-
-
def format_dict(d, name: Optional[str] = None):
"""
Formats a dictionary by removing certain keys and modifying the
@@ -272,41 +227,58 @@ def format_dict(d, name: Optional[str] = None):
_type = _type.replace("Mapping", "dict")
# Change type from str to Tool
- value["type"] = "Tool" if key == "allowed_tools" else _type
+ value["type"] = "Tool" if key in ["allowed_tools"] else _type
+
+ value["type"] = "int" if key in ["max_value_length"] else value["type"]
# Show or not field
value["show"] = bool(
(value["required"] and key not in ["input_variables"])
- or key
- in [
- "allowed_tools",
- "memory",
- "prefix",
- "examples",
- "temperature",
- "model_name",
- ]
+ or key in FORCE_SHOW_FIELDS
or "api_key" in key
)
# Add password field
value["password"] = any(
- text in key for text in ["password", "token", "api", "key"]
+ text in key.lower() for text in ["password", "token", "api", "key"]
)
# Add multline
- value["multiline"] = key in ["suffix", "prefix", "template", "examples"]
+ value["multiline"] = key in [
+ "suffix",
+ "prefix",
+ "template",
+ "examples",
+ "code",
+ "headers",
+ ]
+
+ # Replace dict type with str
+ if "dict" in value["type"].lower():
+ value["type"] = "code"
+
+ if key == "dict_":
+ value["type"] = "file"
+ value["suffixes"] = [".json", ".yaml", ".yml"]
+ value["fileTypes"] = ["json", "yaml", "yml"]
# Replace default value with actual value
if "default" in value:
value["value"] = value["default"]
value.pop("default")
+ if key == "headers":
+ value[
+ "value"
+ ] = """{'Authorization':
+ 'Bearer '}"""
# Add options to openai
if name == "OpenAI" and key == "model_name":
value["options"] = constants.OPENAI_MODELS
- elif name == "OpenAIChat" and key == "model_name":
+ value["list"] = True
+ elif name == "ChatOpenAI" and key == "model_name":
value["options"] = constants.CHAT_OPENAI_MODELS
+ value["list"] = True
return d
diff --git a/src/backend/langflow/utils/validate.py b/src/backend/langflow/utils/validate.py
new file mode 100644
index 000000000..d1353bd77
--- /dev/null
+++ b/src/backend/langflow/utils/validate.py
@@ -0,0 +1,173 @@
+import ast
+import importlib
+import types
+from typing import Dict
+
+
+def add_type_ignores():
+ if not hasattr(ast, "TypeIgnore"):
+
+ class TypeIgnore(ast.AST):
+ _fields = ()
+
+ ast.TypeIgnore = TypeIgnore
+
+
+def validate_code(code):
+ # Initialize the errors dictionary
+ errors = {"imports": {"errors": []}, "function": {"errors": []}}
+
+ # Parse the code string into an abstract syntax tree (AST)
+ try:
+ tree = ast.parse(code)
+ except Exception as e:
+ errors["function"]["errors"].append(str(e))
+ return errors
+
+ # Add a dummy type_ignores field to the AST
+ add_type_ignores()
+ tree.type_ignores = []
+
+ # Evaluate the import statements
+ for node in tree.body:
+ if isinstance(node, ast.Import):
+ for alias in node.names:
+ try:
+ importlib.import_module(alias.name)
+ except ModuleNotFoundError as e:
+ errors["imports"]["errors"].append(str(e))
+
+ # Evaluate the function definition
+ for node in tree.body:
+ if isinstance(node, ast.FunctionDef):
+ code_obj = compile(
+ ast.Module(body=[node], type_ignores=[]), "", "exec"
+ )
+ try:
+ exec(code_obj)
+ except Exception as e:
+ errors["function"]["errors"].append(str(e))
+
+ # Return the errors dictionary
+ return errors
+
+
+def eval_function(function_string: str):
+ # Create an empty dictionary to serve as a separate namespace
+ namespace: Dict = {}
+
+ # Execute the code string in the new namespace
+ exec(function_string, namespace)
+ function_object = next(
+ (
+ obj
+ for name, obj in namespace.items()
+ if isinstance(obj, types.FunctionType)
+ and obj.__code__.co_filename == ""
+ ),
+ None,
+ )
+ if function_object is None:
+ raise ValueError("Function string does not contain a function")
+ return function_object
+
+
+def execute_function(code, function_name, *args, **kwargs):
+ add_type_ignores()
+
+ module = ast.parse(code)
+ exec_globals = globals().copy()
+
+ for node in module.body:
+ if isinstance(node, ast.Import):
+ for alias in node.names:
+ try:
+ exec(
+ f"{alias.asname or alias.name} = importlib.import_module('{alias.name}')",
+ exec_globals,
+ locals(),
+ )
+ exec_globals[alias.asname or alias.name] = importlib.import_module(
+ alias.name
+ )
+ except ModuleNotFoundError as e:
+ raise ModuleNotFoundError(
+ f"Module {alias.name} not found. Please install it and try again."
+ ) from e
+
+ function_code = next(
+ node
+ for node in module.body
+ if isinstance(node, ast.FunctionDef) and node.name == function_name
+ )
+ function_code.parent = None
+ code_obj = compile(
+ ast.Module(body=[function_code], type_ignores=[]), "", "exec"
+ )
+ try:
+ exec(code_obj, exec_globals, locals())
+ except Exception as exc:
+ raise ValueError("Function string does not contain a function") from exc
+
+ # Add the function to the exec_globals dictionary
+ exec_globals[function_name] = locals()[function_name]
+
+ return exec_globals[function_name](*args, **kwargs)
+
+
+def create_function(code, function_name):
+ if not hasattr(ast, "TypeIgnore"):
+
+ class TypeIgnore(ast.AST):
+ _fields = ()
+
+ ast.TypeIgnore = TypeIgnore
+
+ module = ast.parse(code)
+ exec_globals = globals().copy()
+
+ for node in module.body:
+ if isinstance(node, ast.Import):
+ for alias in node.names:
+ try:
+ exec_globals[alias.asname or alias.name] = importlib.import_module(
+ alias.name
+ )
+ except ModuleNotFoundError as e:
+ raise ModuleNotFoundError(
+ f"Module {alias.name} not found. Please install it and try again."
+ ) from e
+
+ function_code = next(
+ node
+ for node in module.body
+ if isinstance(node, ast.FunctionDef) and node.name == function_name
+ )
+ function_code.parent = None
+ code_obj = compile(
+ ast.Module(body=[function_code], type_ignores=[]), "", "exec"
+ )
+ try:
+ exec(code_obj, exec_globals, locals())
+ except Exception:
+ pass
+
+ exec_globals[function_name] = locals()[function_name]
+
+ # Return a function that imports necessary modules and calls the target function
+ def wrapped_function(*args, **kwargs):
+ for module_name, module in exec_globals.items():
+ if isinstance(module, type(importlib)):
+ globals()[module_name] = module
+
+ return exec_globals[function_name](*args, **kwargs)
+
+ return wrapped_function
+
+
+def extract_function_name(code):
+ module = ast.parse(code)
+ for node in module.body:
+ if isinstance(node, ast.FunctionDef):
+ return node.name
+ raise ValueError("No function definition found in the code string")
diff --git a/src/frontend/package-lock.json b/src/frontend/package-lock.json
index f0a40a452..35d27c5f4 100644
--- a/src/frontend/package-lock.json
+++ b/src/frontend/package-lock.json
@@ -14,6 +14,7 @@
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@tailwindcss/forms": "^0.5.3",
+ "@tailwindcss/line-clamp": "^0.4.4",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
@@ -21,9 +22,12 @@
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
+ "ace-builds": "^1.16.0",
+ "ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"lodash": "^4.17.21",
"react": "^18.2.0",
+ "react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
@@ -3927,6 +3931,14 @@
"tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1"
}
},
+ "node_modules/@tailwindcss/line-clamp": {
+ "version": "0.4.4",
+ "resolved": "https://registry.npmjs.org/@tailwindcss/line-clamp/-/line-clamp-0.4.4.tgz",
+ "integrity": "sha512-5U6SY5z8N42VtrCrKlsTAA35gy2VSyYtHWCsg1H87NU1SXnEfekTVlrga9fzUDrrHcGi2Lb5KenUWb4lRQT5/g==",
+ "peerDependencies": {
+ "tailwindcss": ">=2.0.0 || >=3.0.0 || >=3.0.0-alpha.1"
+ }
+ },
"node_modules/@testing-library/dom": {
"version": "8.20.0",
"resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.20.0.tgz",
@@ -5098,6 +5110,11 @@
"node": ">= 0.6"
}
},
+ "node_modules/ace-builds": {
+ "version": "1.16.0",
+ "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.16.0.tgz",
+ "integrity": "sha512-EriMhoxdfhh0zKm7icSt8EXekODAOVsYh9fpnlru9ALwf0Iw7J7bpuqLjhi3QRxvVKR7P0teQdJwTvjVMcYHuw=="
+ },
"node_modules/acorn": {
"version": "8.8.2",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
@@ -5308,6 +5325,20 @@
"node": ">=4"
}
},
+ "node_modules/ansi-to-html": {
+ "version": "0.7.2",
+ "resolved": "https://registry.npmjs.org/ansi-to-html/-/ansi-to-html-0.7.2.tgz",
+ "integrity": "sha512-v6MqmEpNlxF+POuyhKkidusCHWWkaLcGRURzivcU3I9tv7k4JVhFcnukrM5Rlk2rUywdZuzYAZ+kbZqWCnfN3g==",
+ "dependencies": {
+ "entities": "^2.2.0"
+ },
+ "bin": {
+ "ansi-to-html": "bin/ansi-to-html"
+ },
+ "engines": {
+ "node": ">=8.0.0"
+ }
+ },
"node_modules/anymatch": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
@@ -7205,6 +7236,11 @@
"resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz",
"integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="
},
+ "node_modules/diff-match-patch": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz",
+ "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw=="
+ },
"node_modules/diff-sequences": {
"version": "27.5.1",
"resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz",
@@ -12409,6 +12445,16 @@
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
"integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow=="
},
+ "node_modules/lodash.get": {
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
+ "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ=="
+ },
+ "node_modules/lodash.isequal": {
+ "version": "4.5.0",
+ "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz",
+ "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ=="
+ },
"node_modules/lodash.memoize": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz",
@@ -14803,6 +14849,22 @@
"node": ">=0.10.0"
}
},
+ "node_modules/react-ace": {
+ "version": "10.1.0",
+ "resolved": "https://registry.npmjs.org/react-ace/-/react-ace-10.1.0.tgz",
+ "integrity": "sha512-VkvUjZNhdYTuKOKQpMIZi7uzZZVgzCjM7cLYu6F64V0mejY8a2XTyPUIMszC6A4trbeMIHbK5fYFcT/wkP/8VA==",
+ "dependencies": {
+ "ace-builds": "^1.4.14",
+ "diff-match-patch": "^1.0.5",
+ "lodash.get": "^4.4.2",
+ "lodash.isequal": "^4.5.0",
+ "prop-types": "^15.7.2"
+ },
+ "peerDependencies": {
+ "react": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0",
+ "react-dom": "^0.13.0 || ^0.14.0 || ^15.0.1 || ^16.0.0 || ^17.0.0 || ^18.0.0"
+ }
+ },
"node_modules/react-app-polyfill": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/react-app-polyfill/-/react-app-polyfill-3.0.0.tgz",
diff --git a/src/frontend/package.json b/src/frontend/package.json
index 312e52051..6eba117fc 100644
--- a/src/frontend/package.json
+++ b/src/frontend/package.json
@@ -9,6 +9,7 @@
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@tailwindcss/forms": "^0.5.3",
+ "@tailwindcss/line-clamp": "^0.4.4",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
@@ -16,9 +17,12 @@
"@types/node": "^16.18.12",
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
+ "ace-builds": "^1.16.0",
+ "ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"lodash": "^4.17.21",
"react": "^18.2.0",
+ "react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
diff --git a/src/frontend/public/favicon.ico b/src/frontend/public/favicon.ico
new file mode 100644
index 000000000..0612ba9b3
Binary files /dev/null and b/src/frontend/public/favicon.ico differ
diff --git a/src/frontend/public/index.html b/src/frontend/public/index.html
index 57757fb21..b2e9c4b82 100644
--- a/src/frontend/public/index.html
+++ b/src/frontend/public/index.html
@@ -4,7 +4,8 @@
- LangFLow
+
+ LangFlow
diff --git a/src/frontend/src/App.css b/src/frontend/src/App.css
index 60ff72aba..319e2ba39 100644
--- a/src/frontend/src/App.css
+++ b/src/frontend/src/App.css
@@ -40,3 +40,8 @@
transform: rotate(360deg);
}
}
+
+@font-face{
+ font-family: text-security-disc;
+ src: url("assets/text-security-disc.woff") format("woff");
+}
\ No newline at end of file
diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
index c1a6d5f5f..1a2eeb9e6 100644
--- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
+++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
@@ -10,6 +10,11 @@ import { typesContext } from "../../../../contexts/typesContext";
import { ParameterComponentType } from "../../../../types/components";
import FloatComponent from "../../../../components/floatComponent";
import Dropdown from "../../../../components/dropdownComponent";
+import CodeAreaComponent from "../../../../components/codeAreaComponent";
+import InputFileComponent from "../../../../components/inputFileComponent";
+import { TabsContext } from "../../../../contexts/tabsContext";
+import IntComponent from "../../../../components/intComponent";
+import PromptAreaComponent from "../../../../components/promptComponent";
export default function ParameterComponent({
left,
@@ -42,6 +47,7 @@ export default function ParameterComponent({
const { reactFlowInstance } = useContext(typesContext);
let disabled =
reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false;
+ const { save } = useContext(TabsContext);
return (