diff --git a/.githooks/pre-commit b/.githooks/pre-commit
old mode 100644
new mode 100755
diff --git a/.gitignore b/.gitignore
index b730d67bc..ee7c1a707 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+# Prevent the OpenCommit prepare-commit-msg hook from being committed
+prepare-commit-msg
# Logs
logs
*.log
@@ -242,4 +244,5 @@ dmypy.json
# Poetry
.testenv/*
langflow.db
+.githooks/prepare-commit-msg
langchain.db
\ No newline at end of file
diff --git a/Makefile b/Makefile
index bfed91f37..79e27833e 100644
--- a/Makefile
+++ b/Makefile
@@ -5,6 +5,8 @@ all: help
init:
@echo 'Installing pre-commit hooks'
git config core.hooksPath .githooks
+ @echo 'Making pre-commit hook executable'
+ chmod +x .githooks/pre-commit
@echo 'Installing backend dependencies'
make install_backend
@echo 'Installing frontend dependencies'
diff --git a/README.md b/README.md
index 647210d38..b07ec482d 100644
--- a/README.md
+++ b/README.md
@@ -31,12 +31,13 @@
- [Table of Contents](#table-of-contents)
- [📦 Installation](#-installation)
- [Locally](#locally)
+ - [HuggingFace Spaces](#huggingface-spaces)
- [🖥️ Command Line Interface (CLI)](#️-command-line-interface-cli)
- [Usage](#usage)
- [Environment Variables](#environment-variables)
- [Deployment](#deployment)
- - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
- - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
+ - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform)
+ - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud)
- [API Usage](#api-usage)
- [🎨 Creating Flows](#-creating-flows)
- [👋 Contributing](#-contributing)
@@ -61,6 +62,8 @@ or
langflow # or langflow --help
```
+### HuggingFace Spaces
+You can also check it out on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow) and run it in your browser! You can even clone it and have your own copy of Langflow to play with.
# 🖥️ Command Line Interface (CLI)
@@ -103,7 +106,7 @@ A sample `.env` file named `.env.example` is included with the project. Copy thi
# Deployment
-### Deploy Langflow on Google Cloud Platform
+## Deploy Langflow on Google Cloud Platform
Follow our step-by-step guide to deploy Langflow on Google Cloud Platform (GCP) using Google Cloud Shell. The guide is available in the [**Langflow in Google Cloud Platform**](GCP_DEPLOYMENT.md) document.
@@ -112,7 +115,7 @@ Alternatively, click the **"Open in Cloud Shell"** button below to launch Google
[](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/logspace-ai/langflow&working_dir=scripts&shellonly=true&tutorial=walkthroughtutorial_spot.md)
-### Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
+## Deploy Langflow on [Jina AI Cloud](https://github.com/jina-ai/langchain-serve)
Langflow integrates with langchain-serve to provide a one-command deployment to Jina AI Cloud.
@@ -219,6 +222,13 @@ print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS))
> Read more about resource customization, cost, and management of Langflow apps on Jina AI Cloud in the **[langchain-serve](https://github.com/jina-ai/langchain-serve)** repository.
+## Deploy on Railway
+[](https://railway.app/template/Emy2sU?referralCode=MnPSdg)
+
+## Deploy on Render
+
+
+
# 🎨 Creating Flows
diff --git a/poetry.lock b/poetry.lock
index dd4e1116e..2da880491 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -150,23 +150,23 @@ files = [
[[package]]
name = "anthropic"
-version = "0.3.2"
+version = "0.3.4"
description = "Client library for the anthropic API"
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
files = [
- {file = "anthropic-0.3.2-py3-none-any.whl", hash = "sha256:43ad86df406bf91419e3c651e20dcc69ae273c932c92c26973a1621a72ff1d86"},
- {file = "anthropic-0.3.2.tar.gz", hash = "sha256:f968e970bb0dfa38b1ec59db7bb4162fd1e0f2bef95c3203e926effe62bfcf38"},
+ {file = "anthropic-0.3.4-py3-none-any.whl", hash = "sha256:7b0396f663b0e4eaaf485ae59a0be014cddfc0f0b8f4dad79bb35d8f28439097"},
+ {file = "anthropic-0.3.4.tar.gz", hash = "sha256:36184840bd33184697666d4f1ec951d78ef5da22e87d936cd3c04b611d84e93c"},
]
[package.dependencies]
-anyio = ">=3.5.0"
-distro = ">=1.7.0"
-httpx = ">=0.23.0"
+anyio = ">=3.5.0,<4"
+distro = ">=1.7.0,<2"
+httpx = ">=0.23.0,<1"
pydantic = ">=1.9.0,<2.0.0"
tokenizers = ">=0.13.0"
-typing-extensions = ">=4.1.1"
+typing-extensions = ">=4.1.1,<5"
[[package]]
name = "anyio"
@@ -353,37 +353,34 @@ lxml = ["lxml"]
[[package]]
name = "black"
-version = "23.3.0"
+version = "23.7.0"
description = "The uncompromising code formatter."
category = "dev"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
- {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
- {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
- {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
- {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
- {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
- {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
- {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
- {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
- {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
- {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
- {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
- {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
- {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
- {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
- {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
- {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
- {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
+ {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"},
+ {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"},
+ {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"},
+ {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"},
+ {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"},
+ {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"},
+ {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"},
+ {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"},
+ {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"},
+ {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"},
+ {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"},
+ {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"},
+ {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"},
+ {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"},
+ {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"},
+ {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"},
+ {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"},
+ {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"},
+ {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"},
+ {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"},
+ {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"},
+ {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"},
]
[package.dependencies]
@@ -535,87 +532,87 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "3.1.0"
+version = "3.2.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
- {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
- {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
- {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
- {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
- {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
- {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
+ {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
+ {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
+ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
]
[[package]]
@@ -650,14 +647,14 @@ uvicorn = {version = ">=0.18.3", extras = ["standard"]}
[[package]]
name = "click"
-version = "8.1.3"
+version = "8.1.4"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
- {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+ {file = "click-8.1.4-py3-none-any.whl", hash = "sha256:2739815aaa5d2c986a88f1e9230c55e17f0caad3d958a5e13ad0797c166db9e3"},
+ {file = "click-8.1.4.tar.gz", hash = "sha256:b97d0c74955da062a7d4ef92fadb583806a585b2ea81958a81bd72726cbb8e37"},
]
[package.dependencies]
@@ -680,81 +677,82 @@ click = "*"
[[package]]
name = "clickhouse-connect"
-version = "0.6.4"
+version = "0.6.6"
description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
category = "main"
optional = false
python-versions = "~=3.7"
files = [
- {file = "clickhouse-connect-0.6.4.tar.gz", hash = "sha256:0afe555e7a20df2e06341d00935b4298b6a5a1eabee3db43a897719a9bf7f047"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:387f4c3bc4a988ba6b233de642bc849718fc6e142130f3ff62529b7b093e4242"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3483a9945ecfbab9d498422d7a0e7e600c3c7e2e7a6178852e355fbda9871ac6"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db39119dc1905487a80be8b9a8505b45dae98f39d8f2ebfa355f9489d6a9958d"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:025438e7420d8f610d34e5743c17c273fda74a72741c561767896632f896709a"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29ffc92bea0c8ed2eb9e45a79bb708816f9bb5041c23fca0a44b4c73a79d9d53"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:040602c1be63c6bdbd9c5b03218c3aca60ce33ee22871b56f810671665e31d27"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50b037c41453dcb7a47160dca8f3c05f4817e49d5d4ed01aace2c619c0109cfd"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca5da538f50771004e931508f0d84680299df35576a05665edd39400b6d1d486"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-win32.whl", hash = "sha256:2286687bcff686c68df349686694557f3142e2792506ef0ca41664ee54b48122"},
- {file = "clickhouse_connect-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:49fd9b6fa03025b3e04d6ddc3c3443e4383f44b63dcf551d8fddd8a149c06993"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:82fb4120968fb007408e41b6e799a389e1e5f94144362dd25640c89633424295"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b6a4e0f5c84b516142353feb315a7c93fee2fe732536cc1f0e3f994001d6771"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c77676f0afcb45f8443e051849d9d34d88d7925adcb2d14a5320188e3d9ad3"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbc27ca039da6260f749b81740e3ecff4f6d251f39f3c507510a4bd06455b49a"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec2f5ab36dea0037d9cc783b80c4994f176b38aed419c32f6ac7168be76e7667"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:167f87309d0384ca290bb5891293e9be1f89fa2557b7642a9d3cbeeb423271cc"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9e3b422e64661820d47f98eebe7e27d26082c6eba82a83d82e1682d33e6b92b0"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fbc6b3a39dd0d55e0723f0394b8a6608ebd864c0e4775e40a65acbd13fbb07cc"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-win32.whl", hash = "sha256:744bb4e40834b026f7422b990f5e1c9dc0cb3c9b6da9d79e9479edd53dd873fb"},
- {file = "clickhouse_connect-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:cde51f67054fd465925928fa3eb40a23ee691057c55ae58ccf8fae6903abcbe7"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76d4e4c388f6ca1bc310f6d40791ee84f7ddba8e06d8f737d21669a6e4f58f0f"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:785b6386ed1a43912db2badd2f1f480fe4817e87f8b88296c335243288aa1077"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a4baa63a073c843a86fbff64b4c500f636bf8978beddf3c0181491d57d5c8a"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:896d9c812e7429581ee99e920bd0064c004b2cce258a3548c124ad95b2ac46d8"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c236b8db0a382f4998a564b47504727c815d276c9527f6ea43128c323742f6f5"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8dfa63f063a3c75236e499ea8582b1d3d4d56180dad316cc3644967c03db4f36"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:aa46ca9c76bef313e67b9eae0774517b76d2e22bfe4df7e092bad3838b82dee8"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-win32.whl", hash = "sha256:c20f83955356302250e1830408654cf665ca9101794621dd67301529540715e1"},
- {file = "clickhouse_connect-0.6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:10193393835a28a7211bb16daeb1a3d98e4dd9eba649279faad68de328d79136"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b3ec2ea59da24177f8128ce75421cf498d8d647006c2134f388f4437f9171149"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ddf71c7014b5dc2ec08e5301892b8025eb254a063f5a339ec9c3f956a3e11135"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1d00b581608440f8356a3f51a25dbc00526108126811f79b9271f4e0cacc5db"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7fbb81d1b68ddff43cc1b117884030cf28ad8b0668e96703a63c4b1780f26d"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9eb0ea6af4a44f6a9b264ba6416ced81de6e250fd1fc6b6903ce0c20b457520"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:626d9fbc9cc787afe3c234f2db762571756e0114829e5c36ff0dd2f949720827"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:59a947df5301175432ad436022ef74fc8864de7201f438de96772250fd8fb749"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:adc0c122a38fae3cd02f76a81be0a7feb2d12d290003a7c3c314525c910bbb04"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-win32.whl", hash = "sha256:5391a21e7d3c44c49d05fff06384d84f85db64be3a399bb07d8acf043e8caad9"},
- {file = "clickhouse_connect-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:abeca87c81965a3e18e62608c98f8436615409c5a3669203b266f2f6f23ee16f"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:097bfc3da261c9a33df1a0dcb74351b150cb0fed5c570f0dbdf9fb010e820897"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:44cf2ac9ee2b996ef3c2946ad1321d8536fe97b4ddbeeaf2d36f6a1f9d5a53ba"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86c5edb9c6f2e8d2093420747d4f1e5f2d4f901a9cf47c276d400b75e5e07b0e"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eae52f4167beb961c11f462abba49c3d06037cba126c1febc414ee42aab0b23"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73999d748089f4cb28917d63446b80ebffb8939dc3728ede86e3a580494ee7a1"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad1765c78b5e4e8936b8e6044b3da4f31cd24cf15b6e6f1adca542072abc50"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dfca12b0eb0c4f2e60815abae3f15e38ff5d22c48d89ed8b8914d83dc23f6404"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:203e9fc0315373996c32e4f9be3012ee0caed6b92b404653bf7c432318c3107a"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-win32.whl", hash = "sha256:fac7c375b4644f9866310a11bb13299e0070f38c5a975222b7eb5bc330ef753a"},
- {file = "clickhouse_connect-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:2efece8266091df991bb343c5ab1d29ec4e164791b60d4c62f508b1f46086c9a"},
- {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9de809b027e2b1451e117478933e023ee56c48fbc049aef28ef09ab570e0e203"},
- {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b69fb748f4c5fa60eca91f9782be8506d5d2d197ec324a9586f693ae0c1cc94"},
- {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1edadb0e68fa5d834ab2e4a778f99e803261cd1f0ebc513f60d0f8f5044f7b1a"},
- {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e26d2fa4a86c98a69d0dee057b5d5e0317208971da6ee8ff0765f50b1b267db"},
- {file = "clickhouse_connect-0.6.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76f63eedbec2e603f93f25022cc821d147339be482221213949ea0f0b5915eb5"},
- {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a9e6710a7b61e08c8ce64091624af9e37f23804211eebfb647623f33804ccdd"},
- {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab7266c8019e6fede18cd69161934ffba4a1c1910175300492b6ee1da47785b5"},
- {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7df49434c4330dc58d749c8685a76ee441bf1c5776230dbf57952af0409194"},
- {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:549691452128eb3035021b9b7be4f3bdc6c4b9192213b167de2ea3dfae87d01c"},
- {file = "clickhouse_connect-0.6.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:387b82f38b1977c2a38545172838504591ad123d87c09a82758d87e76453beb0"},
- {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f1ea1a89e6696a9905157b1a65884dd33dc88d7d50e74434c5bd650f8cfe1701"},
- {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b697e0e97f3e3404cf5b7d515adb8c025ba21083ed6c1dd4b7b1c789a10343bc"},
- {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22bdad905a6fe6cea576f03cc948b719f44b94bcdc5a00728621d0d3082c724c"},
- {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52fb56fcc82825b3e4760878ca447b5ffbde0fef60c9048ac8c04b3f40fd773a"},
- {file = "clickhouse_connect-0.6.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2577ab779607839570472f889957ad85e71c27afba975d47f3906665865113c9"},
+ {file = "clickhouse-connect-0.6.6.tar.gz", hash = "sha256:28d261b95fe9818f4d8bc4ad48087cbff3c9f0b6574ff04d234ed5bca6619474"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:31187a9947f5771c9e2a4c5d5c33d8c42f1c0f83b1223277c8faf47da0fcd1dc"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1e1713d1f9f294c0cf05ded6f7eff227dde2b19f0d19423fbbeb05fbf5d7c484"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:961c463de6f0de93fc11f1c1f81efc1ec5b5895481cfdf79b3f832e0e242e7e1"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18900f1a13b3b120252fc3583ca1e0fc4d3a33ea98fcf63d33d168a469561056"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4cbbea1a943e742ea649c82f85109b9a9928e61b038923de2813977966acd76"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2714ab61f063a65419278b97f8785ce2440fdb1ef46d9a6703cef9cd38517521"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:823756569f6bea58ff9286cf494abaca5db8652e33ee4a6e7ecb40efbf945088"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11aff145aacfae92b941b95ec5943fb62ea241ec2225b8ecefc4cadadf699893"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-win32.whl", hash = "sha256:4f5f9e3dcece211dc711088a5b264e66e8198b878bdf99619a3a7c54976c118d"},
+ {file = "clickhouse_connect-0.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:8268927ef8d476ef4c81d9562d049f38bc534c4d1d441e072cf8428f08ff6eaa"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5f9cb2ebe0deaa78c942888aad32fa42beb4e75c2377e8784baf3d737c23e5f1"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d502b7f35008facf2774f411eed6b35010923acaac254a8c5683fdf8a11abd62"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87e0f2afe464be0947947d98482eb12b25be8857ae1a31c1aaa17a67f616174d"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69f2c517943eeb7663a9d42bd9b737b8ec5513ddcf58f2372f8b2074a315bae2"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa6c2b488cf9558c2b71a2599d812fe4368d5199edaa011731a8bc7bfe019751"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:df9e80d0b3f5614d38026e7e2e7e7412dec942df8d765c082177879b37e678e2"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a20351fb2ae47aac1ae9b1de0585949616baedd6dbdee5272f466a2aea6ec4dd"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af40eaa20998d96198563748a6fd9796843b6f22e9e95b2136aabd917db33fff"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-win32.whl", hash = "sha256:9591a9bfa58ace467544227f83226b22a1554e2db4cfcf658f25f43c9d94e960"},
+ {file = "clickhouse_connect-0.6.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b6f6159f8eddb0cad4d7e0cbad5944e97e0146ee9f416fc663f7bd3d4e9ea46"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8b941c85fe9ddd5e5edf6fc7458563d9e51ad900d95fe0b87b0458be166693a1"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c642696a758fa726c86ca624dd40acded100d79a9f4bd9f5b56ba0ea4dc44099"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57b6b36b316451c1bdc4450f9418c017af84af57d52d03cd4deb85480819a934"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17cfb1d103b47350c3ba824641fb5ba730e6e29274077a6f8975a3394a1abadb"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d52c7e7560666b93c078bf082e4ed87689fd283e6295a6d8d1dd491d4d7b6072"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0a6d498b689aa09e9d1b0051480a04ecc3509002f54bfb82998d030b4675bb24"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:28c876f7a4713662af2ded7350a0262756ec4da9262bb76cc85cfe2e88015b74"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-win32.whl", hash = "sha256:74bf0a95c7c5644948be0ba9c0abcad7615b806fd2545501862526dbe684db71"},
+ {file = "clickhouse_connect-0.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:0aaa4194d11cb7513de69b791911ff60b3ad8b86f125446a37347208e9b9ae6d"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b873d138dfedbe761f2d66ad1257ea253394c4f8dcffd6ff34dfb990f13a18b"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7140705d05a05ac39eecf86727ab55985e5dba9d1734df8921cc417853a18b7f"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69085fa0f4e5da5cef4ae5249e19f10d91e57ae78628e49e8853b71b6003dbae"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e6ec081d87cc37be3ecf60b88002c58add76a72b4124525cb5cd28539e7d488"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe04eb239b72bc9fa4f1999cd292f82af507cbe1f07546f26a3332c50a294b"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:244bbf7ad92f1f030378412358c47cd377aa6d469b548dba2406a7894c8da2ab"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:69e91bdb25166b6fa4eb55601d86fa57dee82070bce9b97a858c8973615ab8b8"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d2627c8a9625e1c9058cfb5b231a0d0180ed9215d901b601d367de598f27a90d"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-win32.whl", hash = "sha256:87fb937b34b561703eaba5781404736120bab691f4525096d5dfb4b99d4890a6"},
+ {file = "clickhouse_connect-0.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:366c5765e6b7863b3a8d565d5a3b27f9f8731f6f4b016048fa172c6ad6485594"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c1b0d8bee6399f5b68bb0832fae51fd0f5e4bcb539bae2df36d8433b6e38a0b"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3f7e3ead1429ec82b9cd0cf7b807bacf69d895042f75276f63d732378344376"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36df02ebfbfa4dbe3667bf5b3402ff0193d0f682b9aa09d71469c15745473d8e"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa636b0cbbff52c9fafe287d1d818fc9947feaa840c951b8bfd8f8d4d1ee45a0"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4968b6b48baae43d62c241bee9e1c8f680ee3d054254e3959c2d2fb7d370ee"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a4156de52fe1f9b19f8c3a820d57c012a55644c56a87c8d31ecff89115959d60"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fccbe34878e6202ff5715284cbe57e748d36f4c8ad6217f9c80f84a086013fb9"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:70bfe48c0e4340ccf234b691fbd52f32db74649cb84ca28b98a211cc3e30b30c"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-win32.whl", hash = "sha256:9f80b64e2268293a918721e1c122c54e2a1592bb74824fdd70e9add9fbcea31a"},
+ {file = "clickhouse_connect-0.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:04a5030b76ee930b18eb3aeb7847146c2fa29da0feb0ec7dd3a0564a3de944f1"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:75e84c827c8180d5dc66b0e99dba422a3ffd2c7d8ee5ba80e00b9c942dff8a36"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e627061336142d02e9c900a96bcd87372e88f05755bf19b158e68472b99a921"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:194f72e8f3f24c207aa87113b8d11674dab12b35232fd8b7b19b97257796be45"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf755b46089ee6a7f1ab3e24fc6fbacefc54cfefceb0ed81ebf198abf6937dac"},
+ {file = "clickhouse_connect-0.6.6-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:39e58756a13872a24304b1987fafb7d5112ea88469eb55303b1183ebdd7a0be5"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1e29de1264ffa26eb822e57c5715974c9818ae8e16bb114e54352d66947cdf7f"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74ed74427aaf10d2e8f7697b8ec53479f6068287ea695a5f3d3927db40be3c3"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abc910b0f6c93d0d703809fd92cf19b71dcaf8c6d5f328deddae1709061a0aa2"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23b17236e08da8b5d737ccd983db56a2d2222955a49c4b312b12e4a2b4a06c9b"},
+ {file = "clickhouse_connect-0.6.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d4d76560d0ce84d0ba550918433dd1f8da6983edabe2685cd84679cd7a90c179"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:815bd0d5f40174716ffdf1adab066cd0e36c82c81b227224fb7281bdf8734eb6"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82abd319ba51e0c5c2d123e2cf30b1604b0d46f4de694096aa911ddd63701f60"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa3eea5dac3a7cd52523b556ecd05940c4710c96b6e39ec5a05ed7859bddc7f6"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bbc28cdf903b4b2805199ce7d4580814a8b9bb4766ddd835cab46a81e6fcd63"},
+ {file = "clickhouse_connect-0.6.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5fc4deda5a97e672135b4330d81109b443266aa948b09a24a02db58c0fc96bc1"},
]
[package.dependencies]
certifi = "*"
+importlib-metadata = "*"
lz4 = "*"
pytz = "*"
urllib3 = ">=1.26"
@@ -769,14 +767,14 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
[[package]]
name = "cohere"
-version = "4.11.2"
+version = "4.12.1"
description = ""
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
files = [
- {file = "cohere-4.11.2-py3-none-any.whl", hash = "sha256:c5032f4a2aafbcfdf1cacd5b49121c8cc4804fbd121d4a7ac0dfea499398ea28"},
- {file = "cohere-4.11.2.tar.gz", hash = "sha256:4d3e663a306e6fcb87c41cded2195257ebc6992d361a70417f6616f045c4ec47"},
+ {file = "cohere-4.12.1-py3-none-any.whl", hash = "sha256:80d17ae928873cdf63883a338618e477de5c71b3d510d7891af7dfdabc25186e"},
+ {file = "cohere-4.12.1.tar.gz", hash = "sha256:2e93a094757576d6c8d42e76363aa7841eb4166c5b0de8e5ed7272783982d2a4"},
]
[package.dependencies]
@@ -913,31 +911,35 @@ toml = ["tomli"]
[[package]]
name = "cryptography"
-version = "41.0.1"
+version = "41.0.2"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"},
- {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"},
- {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"},
- {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"},
- {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"},
- {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"},
- {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"},
- {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"},
- {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"},
- {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"},
- {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"},
- {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"},
- {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"},
- {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"},
- {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"},
- {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"},
- {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"},
- {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"},
- {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"},
+ {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"},
+ {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"},
+ {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"},
+ {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"},
+ {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"},
+ {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"},
+ {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"},
+ {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"},
+ {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"},
+ {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"},
+ {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"},
+ {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"},
+ {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"},
+ {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"},
+ {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"},
+ {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"},
+ {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"},
+ {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"},
+ {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"},
+ {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"},
+ {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"},
+ {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"},
+ {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"},
]
[package.dependencies]
@@ -1079,6 +1081,18 @@ files = [
[package.extras]
graph = ["objgraph (>=1.7.2)"]
+[[package]]
+name = "diskcache"
+version = "5.6.1"
+description = "Disk Cache -- Disk and file backed persistent cache."
+category = "main"
+optional = false
+python-versions = ">=3"
+files = [
+ {file = "diskcache-5.6.1-py3-none-any.whl", hash = "sha256:558c6a2d5d7c721bb00e40711803d6804850c9f76c426ed81ecc627fe9d2ce2d"},
+ {file = "diskcache-5.6.1.tar.gz", hash = "sha256:e4c978532feff5814c4cc00fe1e11e40501985946643d73220d41ee7737c72c3"},
+]
+
[[package]]
name = "distro"
version = "1.8.0"
@@ -1374,23 +1388,23 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
[[package]]
name = "fastapi"
-version = "0.99.1"
+version = "0.100.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "fastapi-0.99.1-py3-none-any.whl", hash = "sha256:976df7bab51ac7beda9f68c4513b8c4490b5c1135c72aafd0a5ee4023ec5282e"},
- {file = "fastapi-0.99.1.tar.gz", hash = "sha256:ac78f717cd80d657bd183f94d33b9bda84aa376a46a9dab513586b8eef1dc6fc"},
+ {file = "fastapi-0.100.0-py3-none-any.whl", hash = "sha256:271662daf986da8fa98dc2b7c7f61c4abdfdccfb4786d79ed8b2878f172c6d5f"},
+ {file = "fastapi-0.100.0.tar.gz", hash = "sha256:acb5f941ea8215663283c10018323ba7ea737c571b67fc7e88e9469c7eb1d12e"},
]
[package.dependencies]
-pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
+pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<3.0.0"
starlette = ">=0.27.0,<0.28.0"
typing-extensions = ">=4.5.0"
[package.extras]
-all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
[[package]]
name = "filelock"
@@ -1569,14 +1583,14 @@ smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
-version = "3.1.31"
+version = "3.1.32"
description = "GitPython is a Python library used to interact with Git repositories"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"},
- {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"},
+ {file = "GitPython-3.1.32-py3-none-any.whl", hash = "sha256:e3d59b1c2c6ebb9dfa7a184daf3b6dd4914237e7488a1730a6d8f6f5d0b4187f"},
+ {file = "GitPython-3.1.32.tar.gz", hash = "sha256:8d9b8cb1e80b9735e8717c9362079d3ce4c6e5ddeebedd0361b228c3a67a62f6"},
]
[package.dependencies]
@@ -1671,14 +1685,14 @@ six = "*"
[[package]]
name = "google-cloud-aiplatform"
-version = "1.27.0"
+version = "1.28.0"
description = "Vertex AI API client library"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "google-cloud-aiplatform-1.27.0.tar.gz", hash = "sha256:ba9724e51eefddd284547ed632afcf23573b9040ea8ca5fd668a9ea093d5dcec"},
- {file = "google_cloud_aiplatform-1.27.0-py2.py3-none-any.whl", hash = "sha256:501a1dd8ad0012d73da3f3938140113f163d7c75b442bfc2e5f9a8889aca4119"},
+ {file = "google-cloud-aiplatform-1.28.0.tar.gz", hash = "sha256:810339254f354f9a0084f020aab43a56f710348910c177821f7d962b461244a0"},
+ {file = "google_cloud_aiplatform-1.28.0-py2.py3-none-any.whl", hash = "sha256:9fff957e193cc6de88a189b5a967f0cbd358c1da9b7faf36f3b7141fc0486243"},
]
[package.dependencies]
@@ -1742,14 +1756,14 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"]
[[package]]
name = "google-cloud-core"
-version = "2.3.2"
+version = "2.3.3"
description = "Google Cloud API client core library"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "google-cloud-core-2.3.2.tar.gz", hash = "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a"},
- {file = "google_cloud_core-2.3.2-py2.py3-none-any.whl", hash = "sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe"},
+ {file = "google-cloud-core-2.3.3.tar.gz", hash = "sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb"},
+ {file = "google_cloud_core-2.3.3-py2.py3-none-any.whl", hash = "sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863"},
]
[package.dependencies]
@@ -2325,53 +2339,47 @@ pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0
[[package]]
name = "httptools"
-version = "0.5.0"
+version = "0.6.0"
description = "A collection of framework independent HTTP protocol utils."
category = "main"
optional = false
python-versions = ">=3.5.0"
files = [
- {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f470c79061599a126d74385623ff4744c4e0f4a0997a353a44923c0b561ee51"},
- {file = "httptools-0.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e90491a4d77d0cb82e0e7a9cb35d86284c677402e4ce7ba6b448ccc7325c5421"},
- {file = "httptools-0.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1d2357f791b12d86faced7b5736dea9ef4f5ecdc6c3f253e445ee82da579449"},
- {file = "httptools-0.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f90cd6fd97c9a1b7fe9215e60c3bd97336742a0857f00a4cb31547bc22560c2"},
- {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5230a99e724a1bdbbf236a1b58d6e8504b912b0552721c7c6b8570925ee0ccde"},
- {file = "httptools-0.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a47a34f6015dd52c9eb629c0f5a8a5193e47bf2a12d9a3194d231eaf1bc451a"},
- {file = "httptools-0.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:24bb4bb8ac3882f90aa95403a1cb48465de877e2d5298ad6ddcfdebec060787d"},
- {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e67d4f8734f8054d2c4858570cc4b233bf753f56e85217de4dfb2495904cf02e"},
- {file = "httptools-0.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e5eefc58d20e4c2da82c78d91b2906f1a947ef42bd668db05f4ab4201a99f49"},
- {file = "httptools-0.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0297822cea9f90a38df29f48e40b42ac3d48a28637368f3ec6d15eebefd182f9"},
- {file = "httptools-0.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:557be7fbf2bfa4a2ec65192c254e151684545ebab45eca5d50477d562c40f986"},
- {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:54465401dbbec9a6a42cf737627fb0f014d50dc7365a6b6cd57753f151a86ff0"},
- {file = "httptools-0.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4d9ebac23d2de960726ce45f49d70eb5466725c0087a078866043dad115f850f"},
- {file = "httptools-0.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:e8a34e4c0ab7b1ca17b8763613783e2458e77938092c18ac919420ab8655c8c1"},
- {file = "httptools-0.5.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f659d7a48401158c59933904040085c200b4be631cb5f23a7d561fbae593ec1f"},
- {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef1616b3ba965cd68e6f759eeb5d34fbf596a79e84215eeceebf34ba3f61fdc7"},
- {file = "httptools-0.5.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3625a55886257755cb15194efbf209584754e31d336e09e2ffe0685a76cb4b60"},
- {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:72ad589ba5e4a87e1d404cc1cb1b5780bfcb16e2aec957b88ce15fe879cc08ca"},
- {file = "httptools-0.5.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:850fec36c48df5a790aa735417dca8ce7d4b48d59b3ebd6f83e88a8125cde324"},
- {file = "httptools-0.5.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f222e1e9d3f13b68ff8a835574eda02e67277d51631d69d7cf7f8e07df678c86"},
- {file = "httptools-0.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3cb8acf8f951363b617a8420768a9f249099b92e703c052f9a51b66342eea89b"},
- {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550059885dc9c19a072ca6d6735739d879be3b5959ec218ba3e013fd2255a11b"},
- {file = "httptools-0.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a04fe458a4597aa559b79c7f48fe3dceabef0f69f562daf5c5e926b153817281"},
- {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d0c1044bce274ec6711f0770fd2d5544fe392591d204c68328e60a46f88843b"},
- {file = "httptools-0.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c6eeefd4435055a8ebb6c5cc36111b8591c192c56a95b45fe2af22d9881eee25"},
- {file = "httptools-0.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5b65be160adcd9de7a7e6413a4966665756e263f0d5ddeffde277ffeee0576a5"},
- {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fe9c766a0c35b7e3d6b6939393c8dfdd5da3ac5dec7f971ec9134f284c6c36d6"},
- {file = "httptools-0.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85b392aba273566c3d5596a0a490978c085b79700814fb22bfd537d381dd230c"},
- {file = "httptools-0.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e3088f4ed33947e16fd865b8200f9cfae1144f41b64a8cf19b599508e096bc"},
- {file = "httptools-0.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c2a56b6aad7cc8f5551d8e04ff5a319d203f9d870398b94702300de50190f63"},
- {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b571b281a19762adb3f48a7731f6842f920fa71108aff9be49888320ac3e24d"},
- {file = "httptools-0.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa47ffcf70ba6f7848349b8a6f9b481ee0f7637931d91a9860a1838bfc586901"},
- {file = "httptools-0.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:bede7ee075e54b9a5bde695b4fc8f569f30185891796b2e4e09e2226801d09bd"},
- {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:64eba6f168803a7469866a9c9b5263a7463fa8b7a25b35e547492aa7322036b6"},
- {file = "httptools-0.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4b098e4bb1174096a93f48f6193e7d9aa7071506a5877da09a783509ca5fff42"},
- {file = "httptools-0.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9423a2de923820c7e82e18980b937893f4aa8251c43684fa1772e341f6e06887"},
- {file = "httptools-0.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca1b7becf7d9d3ccdbb2f038f665c0f4857e08e1d8481cbcc1a86a0afcfb62b2"},
- {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:50d4613025f15f4b11f1c54bbed4761c0020f7f921b95143ad6d58c151198142"},
- {file = "httptools-0.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ffce9d81c825ac1deaa13bc9694c0562e2840a48ba21cfc9f3b4c922c16f372"},
- {file = "httptools-0.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:1af91b3650ce518d226466f30bbba5b6376dbd3ddb1b2be8b0658c6799dd450b"},
- {file = "httptools-0.5.0.tar.gz", hash = "sha256:295874861c173f9101960bba332429bb77ed4dcd8cdf5cee9922eb00e4f6bc09"},
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339"},
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c"},
+ {file = "httptools-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38"},
+ {file = "httptools-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2"},
+ {file = "httptools-0.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:47043a6e0ea753f006a9d0dd076a8f8c99bc0ecae86a0888448eb3076c43d717"},
+ {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a541579bed0270d1ac10245a3e71e5beeb1903b5fbbc8d8b4d4e728d48ff1d"},
+ {file = "httptools-0.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d802e7b2538a9756df5acc062300c160907b02e15ed15ba035b02bce43e89c"},
+ {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:26326e0a8fe56829f3af483200d914a7cd16d8d398d14e36888b56de30bec81a"},
+ {file = "httptools-0.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e41ccac9e77cd045f3e4ee0fc62cbf3d54d7d4b375431eb855561f26ee7a9ec4"},
+ {file = "httptools-0.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e748fc0d5c4a629988ef50ac1aef99dfb5e8996583a73a717fc2cac4ab89932"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201"},
+ {file = "httptools-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d"},
+ {file = "httptools-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd"},
+ {file = "httptools-0.6.0.tar.gz", hash = "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796"},
]
[package.extras]
@@ -2495,22 +2503,22 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag
[[package]]
name = "importlib-resources"
-version = "5.12.0"
+version = "6.0.0"
description = "Read resources from Python packages"
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
- {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
+ {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"},
+ {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"},
]
[package.dependencies]
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[[package]]
name = "iniconfig"
@@ -2612,22 +2620,22 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa
[[package]]
name = "jaraco-classes"
-version = "3.2.3"
+version = "3.3.0"
description = "Utility functions for Python class constructs"
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "jaraco.classes-3.2.3-py3-none-any.whl", hash = "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158"},
- {file = "jaraco.classes-3.2.3.tar.gz", hash = "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a"},
+ {file = "jaraco.classes-3.3.0-py3-none-any.whl", hash = "sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb"},
+ {file = "jaraco.classes-3.3.0.tar.gz", hash = "sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621"},
]
[package.dependencies]
more-itertools = "*"
[package.extras]
-docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
-testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[[package]]
name = "jcloud"
@@ -2933,21 +2941,21 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)",
[[package]]
name = "langchain"
-version = "0.0.219"
+version = "0.0.229"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
- {file = "langchain-0.0.219-py3-none-any.whl", hash = "sha256:1f08a00e622f1c75087d6013f34e82be3f8dd1859266eb583a0fd7bc045090cf"},
- {file = "langchain-0.0.219.tar.gz", hash = "sha256:842f8212939e5ac4005906d2215574ffb3e34d2fe28f5bc0f46eb3b28fb29c5d"},
+ {file = "langchain-0.0.229-py3-none-any.whl", hash = "sha256:a7ca79e4ab892756ede95d212bd42243303f91b172535cefd02b0b8965e4e7b7"},
+ {file = "langchain-0.0.229.tar.gz", hash = "sha256:ab1beac7f3fc1f06ab1a0b545ef0d47a3d5efef3b2b4c646aafaefc2eb3151d3"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
dataclasses-json = ">=0.5.7,<0.6.0"
-langchainplus-sdk = ">=0.0.17"
+langchainplus-sdk = ">=0.0.20,<0.0.21"
numexpr = ">=2.8.4,<3.0.0"
numpy = ">=1,<2"
openapi-schema-pydantic = ">=1.2,<2.0"
@@ -2958,33 +2966,33 @@ SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<9.0.0"
[package.extras]
-all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.3,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (==9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.6.2,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.1.dev3,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", 
"tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
+all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3,<0.4)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.3,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.6.2,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.1.dev3,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=0.11.0,<0.12.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text 
(>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0a20230509004)", "openai (>=0,<1)"]
-clarifai = ["clarifai (==9.1.0)"]
+clarifai = ["clarifai (>=9.1.0)"]
cohere = ["cohere (>=3,<4)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
-extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "zep-python (>=0.31)"]
+extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "zep-python (>=0.32)"]
javascript = ["esprima (>=4.0.1,<5.0.0)"]
-llms = ["anthropic (>=0.2.6,<0.3.0)", "clarifai (==9.1.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.6)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
+llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-serve"
-version = "0.0.52"
+version = "0.0.54"
description = "Langchain Serve - serve your langchain apps on Jina AI Cloud."
category = "main"
optional = true
python-versions = "*"
files = [
- {file = "langchain-serve-0.0.52.tar.gz", hash = "sha256:e69dcf6022423279059ab7ebda025e252aba4d9fd8e3e49776300355dbf85d1c"},
+ {file = "langchain-serve-0.0.54.tar.gz", hash = "sha256:5cbc980886c81f3bac7ed3337adeb0b94fc9f3645e4501dd7f0702f90766bbaa"},
]
[package.dependencies]
click = "*"
-jcloud = ">=0.2.8"
+jcloud = ">=0.2.8,<=0.2.12"
jina = "3.15.2"
jina-hubble-sdk = "*"
langchain = "*"
@@ -3037,17 +3045,22 @@ test = ["coverage", "pytest", "pytest-cov"]
[[package]]
name = "llama-cpp-python"
-version = "0.1.55"
+version = "0.1.70"
description = "A Python wrapper for llama.cpp"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "llama_cpp_python-0.1.55.tar.gz", hash = "sha256:1bc749f314a979c601b2dae22eb1f2d63fe791bc1237cce24d36b4f856be8ca2"},
+ {file = "llama_cpp_python-0.1.70.tar.gz", hash = "sha256:616ea7ad87417eba9c76d6ffe060b855af39ab7e795032dcf19fc49a7e73806b"},
]
[package.dependencies]
-typing-extensions = ">=4.5.0,<5.0.0"
+diskcache = ">=5.6.1"
+numpy = ">=1.20.0"
+typing-extensions = ">=4.5.0"
+
+[package.extras]
+server = ["fastapi (>=0.100.0)", "pydantic-settings (>=2.0.1)", "sse-starlette (>=1.6.1)", "uvicorn (>=0.22.1)"]
[[package]]
name = "loguru"
@@ -3735,37 +3748,37 @@ numpy = ">=1.13.3"
[[package]]
name = "numpy"
-version = "1.25.0"
+version = "1.25.1"
description = "Fundamental package for array computing in Python"
category = "main"
optional = false
python-versions = ">=3.9"
files = [
- {file = "numpy-1.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8aa130c3042052d656751df5e81f6d61edff3e289b5994edcf77f54118a8d9f4"},
- {file = "numpy-1.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e3f2b96e3b63c978bc29daaa3700c028fe3f049ea3031b58aa33fe2a5809d24"},
- {file = "numpy-1.25.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6b267f349a99d3908b56645eebf340cb58f01bd1e773b4eea1a905b3f0e4208"},
- {file = "numpy-1.25.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4aedd08f15d3045a4e9c648f1e04daca2ab1044256959f1f95aafeeb3d794c16"},
- {file = "numpy-1.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6d183b5c58513f74225c376643234c369468e02947b47942eacbb23c1671f25d"},
- {file = "numpy-1.25.0-cp310-cp310-win32.whl", hash = "sha256:d76a84998c51b8b68b40448ddd02bd1081bb33abcdc28beee6cd284fe11036c6"},
- {file = "numpy-1.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0dc071017bc00abb7d7201bac06fa80333c6314477b3d10b52b58fa6a6e38f6"},
- {file = "numpy-1.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c69fe5f05eea336b7a740e114dec995e2f927003c30702d896892403df6dbf0"},
- {file = "numpy-1.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c7211d7920b97aeca7b3773a6783492b5b93baba39e7c36054f6e749fc7490c"},
- {file = "numpy-1.25.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecc68f11404930e9c7ecfc937aa423e1e50158317bf67ca91736a9864eae0232"},
- {file = "numpy-1.25.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e559c6afbca484072a98a51b6fa466aae785cfe89b69e8b856c3191bc8872a82"},
- {file = "numpy-1.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6c284907e37f5e04d2412950960894b143a648dea3f79290757eb878b91acbd1"},
- {file = "numpy-1.25.0-cp311-cp311-win32.whl", hash = "sha256:95367ccd88c07af21b379be1725b5322362bb83679d36691f124a16357390153"},
- {file = "numpy-1.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:b76aa836a952059d70a2788a2d98cb2a533ccd46222558b6970348939e55fc24"},
- {file = "numpy-1.25.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b792164e539d99d93e4e5e09ae10f8cbe5466de7d759fc155e075237e0c274e4"},
- {file = "numpy-1.25.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7cd981ccc0afe49b9883f14761bb57c964df71124dcd155b0cba2b591f0d64b9"},
- {file = "numpy-1.25.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa48bebfb41f93043a796128854b84407d4df730d3fb6e5dc36402f5cd594c0"},
- {file = "numpy-1.25.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5177310ac2e63d6603f659fadc1e7bab33dd5a8db4e0596df34214eeab0fee3b"},
- {file = "numpy-1.25.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0ac6edfb35d2a99aaf102b509c8e9319c499ebd4978df4971b94419a116d0790"},
- {file = "numpy-1.25.0-cp39-cp39-win32.whl", hash = "sha256:7412125b4f18aeddca2ecd7219ea2d2708f697943e6f624be41aa5f8a9852cc4"},
- {file = "numpy-1.25.0-cp39-cp39-win_amd64.whl", hash = "sha256:26815c6c8498dc49d81faa76d61078c4f9f0859ce7817919021b9eba72b425e3"},
- {file = "numpy-1.25.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b1b90860bf7d8a8c313b372d4f27343a54f415b20fb69dd601b7efe1029c91e"},
- {file = "numpy-1.25.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85cdae87d8c136fd4da4dad1e48064d700f63e923d5af6c8c782ac0df8044542"},
- {file = "numpy-1.25.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cc3fda2b36482891db1060f00f881c77f9423eead4c3579629940a3e12095fe8"},
- {file = "numpy-1.25.0.tar.gz", hash = "sha256:f1accae9a28dc3cda46a91de86acf69de0d1b5f4edd44a9b0c3ceb8036dfff19"},
+ {file = "numpy-1.25.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d339465dff3eb33c701430bcb9c325b60354698340229e1dff97745e6b3efa"},
+ {file = "numpy-1.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d736b75c3f2cb96843a5c7f8d8ccc414768d34b0a75f466c05f3a739b406f10b"},
+ {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a90725800caeaa160732d6b31f3f843ebd45d6b5f3eec9e8cc287e30f2805bf"},
+ {file = "numpy-1.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c6c9261d21e617c6dc5eacba35cb68ec36bb72adcff0dee63f8fbc899362588"},
+ {file = "numpy-1.25.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0def91f8af6ec4bb94c370e38c575855bf1d0be8a8fbfba42ef9c073faf2cf19"},
+ {file = "numpy-1.25.1-cp310-cp310-win32.whl", hash = "sha256:fd67b306320dcadea700a8f79b9e671e607f8696e98ec255915c0c6d6b818503"},
+ {file = "numpy-1.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:c1516db588987450b85595586605742879e50dcce923e8973f79529651545b57"},
+ {file = "numpy-1.25.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6b82655dd8efeea69dbf85d00fca40013d7f503212bc5259056244961268b66e"},
+ {file = "numpy-1.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e8f6049c4878cb16960fbbfb22105e49d13d752d4d8371b55110941fb3b17800"},
+ {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41a56b70e8139884eccb2f733c2f7378af06c82304959e174f8e7370af112e09"},
+ {file = "numpy-1.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5154b1a25ec796b1aee12ac1b22f414f94752c5f94832f14d8d6c9ac40bcca6"},
+ {file = "numpy-1.25.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38eb6548bb91c421261b4805dc44def9ca1a6eef6444ce35ad1669c0f1a3fc5d"},
+ {file = "numpy-1.25.1-cp311-cp311-win32.whl", hash = "sha256:791f409064d0a69dd20579345d852c59822c6aa087f23b07b1b4e28ff5880fcb"},
+ {file = "numpy-1.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:c40571fe966393b212689aa17e32ed905924120737194b5d5c1b20b9ed0fb171"},
+ {file = "numpy-1.25.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3d7abcdd85aea3e6cdddb59af2350c7ab1ed764397f8eec97a038ad244d2d105"},
+ {file = "numpy-1.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a180429394f81c7933634ae49b37b472d343cccb5bb0c4a575ac8bbc433722f"},
+ {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d412c1697c3853c6fc3cb9751b4915859c7afe6a277c2bf00acf287d56c4e625"},
+ {file = "numpy-1.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20e1266411120a4f16fad8efa8e0454d21d00b8c7cee5b5ccad7565d95eb42dd"},
+ {file = "numpy-1.25.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f76aebc3358ade9eacf9bc2bb8ae589863a4f911611694103af05346637df1b7"},
+ {file = "numpy-1.25.1-cp39-cp39-win32.whl", hash = "sha256:247d3ffdd7775bdf191f848be8d49100495114c82c2bd134e8d5d075fb386a1c"},
+ {file = "numpy-1.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:1d5d3c68e443c90b38fdf8ef40e60e2538a27548b39b12b73132456847f4b631"},
+ {file = "numpy-1.25.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35a9527c977b924042170a0887de727cd84ff179e478481404c5dc66b4170009"},
+ {file = "numpy-1.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d3fe3dd0506a28493d82dc3cf254be8cd0d26f4008a417385cbf1ae95b54004"},
+ {file = "numpy-1.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:012097b5b0d00a11070e8f2e261128c44157a8689f7dedcf35576e525893f4fe"},
+ {file = "numpy-1.25.1.tar.gz", hash = "sha256:9a3a9f3a61480cc086117b426a8bd86869c213fc4072e606f01c4e4b66eb92bf"},
]
[[package]]
@@ -4157,58 +4170,58 @@ files = [
[[package]]
name = "orjson"
-version = "3.9.1"
+version = "3.9.2"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
- {file = "orjson-3.9.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4434b7b786fdc394b95d029fb99949d7c2b05bbd4bf5cb5e3906be96ffeee3b"},
- {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09faf14f74ed47e773fa56833be118e04aa534956f661eb491522970b7478e3b"},
- {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:503eb86a8d53a187fe66aa80c69295a3ca35475804da89a9547e4fce5f803822"},
- {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20f2804b5a1dbd3609c086041bd243519224d47716efd7429db6c03ed28b7cc3"},
- {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fd828e0656615a711c4cc4da70f3cac142e66a6703ba876c20156a14e28e3fa"},
- {file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec53d648176f873203b9c700a0abacab33ca1ab595066e9d616f98cdc56f4434"},
- {file = "orjson-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e186ae76b0d97c505500664193ddf508c13c1e675d9b25f1f4414a7606100da6"},
- {file = "orjson-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d4edee78503016f4df30aeede0d999b3cb11fb56f47e9db0e487bce0aaca9285"},
- {file = "orjson-3.9.1-cp310-none-win_amd64.whl", hash = "sha256:a4cc5d21e68af982d9a2528ac61e604f092c60eed27aef3324969c68f182ec7e"},
- {file = "orjson-3.9.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:761b6efd33c49de20dd73ce64cc59da62c0dab10aa6015f582680e0663cc792c"},
- {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31229f9d0b8dc2ef7ee7e4393f2e4433a28e16582d4b25afbfccc9d68dc768f8"},
- {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b7ab18d55ecb1de543d452f0a5f8094b52282b916aa4097ac11a4c79f317b86"},
- {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db774344c39041f4801c7dfe03483df9203cbd6c84e601a65908e5552228dd25"},
- {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae47ef8c0fe89c4677db7e9e1fb2093ca6e66c3acbee5442d84d74e727edad5e"},
- {file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:103952c21575b9805803c98add2eaecd005580a1e746292ed2ec0d76dd3b9746"},
- {file = "orjson-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cb0121e6f2c9da3eddf049b99b95fef0adf8480ea7cb544ce858706cdf916eb"},
- {file = "orjson-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24d4ddaa2876e657c0fd32902b5c451fd2afc35159d66a58da7837357044b8c2"},
- {file = "orjson-3.9.1-cp311-none-win_amd64.whl", hash = "sha256:0b53b5f72cf536dd8aa4fc4c95e7e09a7adb119f8ff8ee6cc60f735d7740ad6a"},
- {file = "orjson-3.9.1-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4b68d01a506242316a07f1d2f29fb0a8b36cee30a7c35076f1ef59dce0890c1"},
- {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9dd4abe6c6fd352f00f4246d85228f6a9847d0cc14f4d54ee553718c225388f"},
- {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e20bca5e13041e31ceba7a09bf142e6d63c8a7467f5a9c974f8c13377c75af2"},
- {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8ae0467d01eb1e4bcffef4486d964bfd1c2e608103e75f7074ed34be5df48cc"},
- {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06f6ab4697fab090517f295915318763a97a12ee8186054adf21c1e6f6abbd3d"},
- {file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8515867713301fa065c58ec4c9053ba1a22c35113ab4acad555317b8fd802e50"},
- {file = "orjson-3.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:393d0697d1dfa18d27d193e980c04fdfb672c87f7765b87952f550521e21b627"},
- {file = "orjson-3.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d96747662d3666f79119e5d28c124e7d356c7dc195cd4b09faea4031c9079dc9"},
- {file = "orjson-3.9.1-cp37-none-win_amd64.whl", hash = "sha256:6d173d3921dd58a068c88ec22baea7dbc87a137411501618b1292a9d6252318e"},
- {file = "orjson-3.9.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d1c2b0b4246c992ce2529fc610a446b945f1429445ece1c1f826a234c829a918"},
- {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19f70ba1f441e1c4bb1a581f0baa092e8b3e3ce5b2aac2e1e090f0ac097966da"},
- {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:375d65f002e686212aac42680aed044872c45ee4bc656cf63d4a215137a6124a"},
- {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4751cee4a7b1daeacb90a7f5adf2170ccab893c3ab7c5cea58b45a13f89b30b3"},
- {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d9a2a4b2302d5ebc3695498ebc305c3568e5ad4f3501eb30a6405a32d8af22"},
- {file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46b4facc32643b2689dfc292c0c463985dac4b6ab504799cf51fc3c6959ed668"},
- {file = "orjson-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec7c8a0f1bf35da0d5fd14f8956f3b82a9a6918a3c6963d718dfd414d6d3b604"},
- {file = "orjson-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3a40b0fbe06ccd4d6a99e523d20b47985655bcada8d1eba485b1b32a43e4904"},
- {file = "orjson-3.9.1-cp38-none-win_amd64.whl", hash = "sha256:402f9d3edfec4560a98880224ec10eba4c5f7b4791e4bc0d4f4d8df5faf2a006"},
- {file = "orjson-3.9.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:49c0d78dcd34626e2e934f1192d7c052b94e0ecadc5f386fd2bda6d2e03dadf5"},
- {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:125f63e56d38393daa0a1a6dc6fedefca16c538614b66ea5997c3bd3af35ef26"},
- {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08927970365d2e1f3ce4894f9ff928a7b865d53f26768f1bbdd85dd4fee3e966"},
- {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9a744e212d4780ecd67f4b6b128b2e727bee1df03e7059cddb2dfe1083e7dc4"},
- {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1dbf36db7240c61eec98c8d21545d671bce70be0730deb2c0d772e06b71af3"},
- {file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a1e384626f76b66df615f7bb622a79a25c166d08c5d2151ffd41f24c4cc104"},
- {file = "orjson-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:15d28872fb055bf17ffca913826e618af61b2f689d2b170f72ecae1a86f80d52"},
- {file = "orjson-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e4d905338f9ef32c67566929dfbfbb23cc80287af8a2c38930fb0eda3d40b76"},
- {file = "orjson-3.9.1-cp39-none-win_amd64.whl", hash = "sha256:48a27da6c7306965846565cc385611d03382bbd84120008653aa2f6741e2105d"},
- {file = "orjson-3.9.1.tar.gz", hash = "sha256:db373a25ec4a4fccf8186f9a72a1b3442837e40807a736a815ab42481e83b7d0"},
+ {file = "orjson-3.9.2-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7323e4ca8322b1ecb87562f1ec2491831c086d9faa9a6c6503f489dadbed37d7"},
+ {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1272688ea1865f711b01ba479dea2d53e037ea00892fd04196b5875f7021d9d3"},
+ {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b9a26f1d1427a9101a1e8910f2e2df1f44d3d18ad5480ba031b15d5c1cb282e"},
+ {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a5ca55b0d8f25f18b471e34abaee4b175924b6cd62f59992945b25963443141"},
+ {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:877872db2c0f41fbe21f852ff642ca842a43bc34895b70f71c9d575df31fffb4"},
+ {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a39c2529d75373b7167bf84c814ef9b8f3737a339c225ed6c0df40736df8748"},
+ {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84ebd6fdf138eb0eb4280045442331ee71c0aab5e16397ba6645f32f911bfb37"},
+ {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a60a1cfcfe310547a1946506dd4f1ed0a7d5bd5b02c8697d9d5dcd8d2e9245e"},
+ {file = "orjson-3.9.2-cp310-none-win_amd64.whl", hash = "sha256:c290c4f81e8fd0c1683638802c11610b2f722b540f8e5e858b6914b495cf90c8"},
+ {file = "orjson-3.9.2-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:02ef014f9a605e84b675060785e37ec9c0d2347a04f1307a9d6840ab8ecd6f55"},
+ {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:992af54265ada1c1579500d6594ed73fe333e726de70d64919cf37f93defdd06"},
+ {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a40958f7af7c6d992ee67b2da4098dca8b770fc3b4b3834d540477788bfa76d3"},
+ {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93864dec3e3dd058a2dbe488d11ac0345214a6a12697f53a63e34de7d28d4257"},
+ {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16fdf5a82df80c544c3c91516ab3882cd1ac4f1f84eefeafa642e05cef5f6699"},
+ {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275b5a18fd9ed60b2720543d3ddac170051c43d680e47d04ff5203d2c6d8ebf1"},
+ {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b9aea6dcb99fcbc9f6d1dd84fca92322fda261da7fb014514bb4689c7c2097a8"},
+ {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d74ae0e101d17c22ef67b741ba356ab896fc0fa64b301c2bf2bb0a4d874b190"},
+ {file = "orjson-3.9.2-cp311-none-win_amd64.whl", hash = "sha256:6320b28e7bdb58c3a3a5efffe04b9edad3318d82409e84670a9b24e8035a249d"},
+ {file = "orjson-3.9.2-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:368e9cc91ecb7ac21f2aa475e1901204110cf3e714e98649c2502227d248f947"},
+ {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58e9e70f0dcd6a802c35887f306b555ff7a214840aad7de24901fc8bd9cf5dde"},
+ {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00c983896c2e01c94c0ef72fd7373b2aa06d0c0eed0342c4884559f812a6835b"},
+ {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee743e8890b16c87a2f89733f983370672272b61ee77429c0a5899b2c98c1a7"},
+ {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7b065942d362aad4818ff599d2f104c35a565c2cbcbab8c09ec49edba91da75"},
+ {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e46e9c5b404bb9e41d5555762fd410d5466b7eb1ec170ad1b1609cbebe71df21"},
+ {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8170157288714678ffd64f5de33039e1164a73fd8b6be40a8a273f80093f5c4f"},
+ {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e3e2f087161947dafe8319ea2cfcb9cea4bb9d2172ecc60ac3c9738f72ef2909"},
+ {file = "orjson-3.9.2-cp37-none-win_amd64.whl", hash = "sha256:d7de3dbbe74109ae598692113cec327fd30c5a30ebca819b21dfa4052f7b08ef"},
+ {file = "orjson-3.9.2-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8cd4385c59bbc1433cad4a80aca65d2d9039646a9c57f8084897549b55913b17"},
+ {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74036aab1a80c361039290cdbc51aa7adc7ea13f56e5ef94e9be536abd227bd"},
+ {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1aaa46d7d4ae55335f635eadc9be0bd9bcf742e6757209fc6dc697e390010adc"},
+ {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e52c67ed6bb368083aa2078ea3ccbd9721920b93d4b06c43eb4e20c4c860046"},
+ {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a6cdfcf9c7dd4026b2b01fdff56986251dc0cc1e980c690c79eec3ae07b36e7"},
+ {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1882a70bb69595b9ec5aac0040a819e94d2833fe54901e2b32f5e734bc259a8b"},
+ {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc05e060d452145ab3c0b5420769e7356050ea311fc03cb9d79c481982917cca"},
+ {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f8bc2c40d9bb26efefb10949d261a47ca196772c308babc538dd9f4b73e8d386"},
+ {file = "orjson-3.9.2-cp38-none-win_amd64.whl", hash = "sha256:3164fc20a585ec30a9aff33ad5de3b20ce85702b2b2a456852c413e3f0d7ab09"},
+ {file = "orjson-3.9.2-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7a6ccadf788531595ed4728aa746bc271955448d2460ff0ef8e21eb3f2a281ba"},
+ {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3245d230370f571c945f69aab823c279a868dc877352817e22e551de155cb06c"},
+ {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:205925b179550a4ee39b8418dd4c94ad6b777d165d7d22614771c771d44f57bd"},
+ {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0325fe2d69512187761f7368c8cda1959bcb75fc56b8e7a884e9569112320e57"},
+ {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:806704cd58708acc66a064a9a58e3be25cf1c3f9f159e8757bd3f515bfabdfa1"},
+ {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03fb36f187a0c19ff38f6289418863df8b9b7880cdbe279e920bef3a09d8dab1"},
+ {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20925d07a97c49c6305bff1635318d9fc1804aa4ccacb5fb0deb8a910e57d97a"},
+ {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eebfed53bec5674e981ebe8ed2cf00b3f7bcda62d634733ff779c264307ea505"},
+ {file = "orjson-3.9.2-cp39-none-win_amd64.whl", hash = "sha256:869b961df5fcedf6c79f4096119b35679b63272362e9b745e668f0391a892d39"},
+ {file = "orjson-3.9.2.tar.gz", hash = "sha256:24257c8f641979bf25ecd3e27251b5cc194cdd3a6e96004aac8446f5e63d9664"},
]
[[package]]
@@ -4520,14 +4533,14 @@ testing = ["pytest", "pytest-cov"]
[[package]]
name = "platformdirs"
-version = "3.8.0"
+version = "3.8.1"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-3.8.0-py3-none-any.whl", hash = "sha256:ca9ed98ce73076ba72e092b23d3c93ea6c4e186b3f1c3dad6edd98ff6ffcca2e"},
- {file = "platformdirs-3.8.0.tar.gz", hash = "sha256:b0cabcb11063d21a0b261d557acb0a9d2126350e63b70cdf7db6347baea456dc"},
+ {file = "platformdirs-3.8.1-py3-none-any.whl", hash = "sha256:cec7b889196b9144d088e4c57d9ceef7374f6c39694ad1577a0aab50d27ea28c"},
+ {file = "platformdirs-3.8.1.tar.gz", hash = "sha256:f87ca4fcff7d2b0f81c6a748a77973d7af0f4d526f98f308477c3c436c74d528"},
]
[package.extras]
@@ -4614,14 +4627,14 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint"
[[package]]
name = "prometheus-client"
-version = "0.17.0"
+version = "0.17.1"
description = "Python client for the Prometheus monitoring system."
category = "main"
optional = false
python-versions = ">=3.6"
files = [
- {file = "prometheus_client-0.17.0-py3-none-any.whl", hash = "sha256:a77b708cf083f4d1a3fb3ce5c95b4afa32b9c521ae363354a4a910204ea095ce"},
- {file = "prometheus_client-0.17.0.tar.gz", hash = "sha256:9c3b26f1535945e85b8934fb374678d263137b78ef85f305b1156c7c881cd11b"},
+ {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"},
+ {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"},
]
[package.extras]
@@ -5223,14 +5236,14 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pypdf"
-version = "3.12.0"
+version = "3.12.1"
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
- {file = "pypdf-3.12.0-py3-none-any.whl", hash = "sha256:826ad4681660394d7a5742fe8380168cf13058e27b826b7f5b798e994cb77b38"},
- {file = "pypdf-3.12.0.tar.gz", hash = "sha256:cebac920db0698369f49c389018858a5436862bf3c45b64b10c55c008878db95"},
+ {file = "pypdf-3.12.1-py3-none-any.whl", hash = "sha256:74aa287c83e9aad2ce4a3627458dad729e39b5deae52175fe9f97bfffdde41bc"},
+ {file = "pypdf-3.12.1.tar.gz", hash = "sha256:68bf9e089caaab356518410168df9ed90f0a6109e29adac168449d4054fa0094"},
]
[package.dependencies]
@@ -6417,14 +6430,14 @@ sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "sqlalchemy2-stubs"
-version = "0.0.2a34"
+version = "0.0.2a35"
description = "Typing Stubs for SQLAlchemy 1.4"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
- {file = "sqlalchemy2-stubs-0.0.2a34.tar.gz", hash = "sha256:2432137ab2fde1a608df4544f6712427b0b7ff25990cfbbc5a9d1db6c8c6f489"},
- {file = "sqlalchemy2_stubs-0.0.2a34-py3-none-any.whl", hash = "sha256:a313220ac793404349899faf1272e821a62dbe1d3a029bd444faa8d3e966cd07"},
+ {file = "sqlalchemy2-stubs-0.0.2a35.tar.gz", hash = "sha256:bd5d530697d7e8c8504c7fe792ef334538392a5fb7aa7e4f670bfacdd668a19d"},
+ {file = "sqlalchemy2_stubs-0.0.2a35-py3-none-any.whl", hash = "sha256:593784ff9fc0dc2ded1895e3322591689db3be06f3ca006e3ef47640baf2d38a"},
]
[package.dependencies]
@@ -7377,14 +7390,14 @@ files = [
[[package]]
name = "weaviate-client"
-version = "3.21.0"
+version = "3.22.1"
description = "A python native Weaviate client"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
- {file = "weaviate-client-3.21.0.tar.gz", hash = "sha256:ec94ac554883c765e94da8b2947c4f0fa4a0378ed3bbe9f3653df3a5b1745a6d"},
- {file = "weaviate_client-3.21.0-py3-none-any.whl", hash = "sha256:420444ded7106fb000f4f8b2321b5f5fa2387825aa7a303d702accf61026f9d2"},
+ {file = "weaviate-client-3.22.1.tar.gz", hash = "sha256:aff61bd3f5d74df20a62328443e3aa9c860d5330fdfb19c4d8ddc44cb604032f"},
+ {file = "weaviate_client-3.22.1-py3-none-any.whl", hash = "sha256:01843a4899a227300e570409e77628e9d1b28476313f94943c37aee3f75112e1"},
]
[package.dependencies]
@@ -7753,19 +7766,19 @@ multidict = ">=4.0"
[[package]]
name = "zipp"
-version = "3.15.0"
+version = "3.16.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
- {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
+ {file = "zipp-3.16.0-py3-none-any.whl", hash = "sha256:5dadc3ad0a1f825fe42ce1bce0f2fc5a13af2e6b2d386af5b0ff295bc0a287d3"},
+ {file = "zipp-3.16.0.tar.gz", hash = "sha256:1876cb065531855bbe83b6c489dcf69ecc28f1068d8e95959fe8bbc77774c941"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
[[package]]
name = "zstandard"
@@ -7832,4 +7845,4 @@ deploy = ["langchain-serve"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.11"
-content-hash = "e25e43fde8f96f57beab702ac4c51cb3e569b81f85c540a7b4b5fb7b6388d04e"
+content-hash = "3ef18bc73e595f6aa8c3ee4b4c9666f3328c601933aef1bf225b865f39504e3c"
diff --git a/pyproject.toml b/pyproject.toml
index 08bdaad65..c82e532f8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -23,14 +23,14 @@ langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.11"
-fastapi = "^0.99.0"
+fastapi = "^0.100.0"
uvicorn = "^0.22.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^20.1.0"
-langchain = "^0.0.219"
+langchain = "^0.0.229"
openai = "^0.27.8"
pandas = "^2.0.0"
chromadb = "^0.3.21"
@@ -78,7 +78,7 @@ black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"
ruff = "^0.0.254"
-httpx = "^0.23.3"
+httpx = "*"
pytest = "^7.2.2"
types-requests = "^2.28.11"
requests = "^2.28.0"
diff --git a/render.yaml b/render.yaml
new file mode 100644
index 000000000..e67da9334
--- /dev/null
+++ b/render.yaml
@@ -0,0 +1,11 @@
+services:
+ # A Docker web service
+ - type: web
+ name: langflow
+ runtime: docker
+ plan: free
+ dockerfilePath: ./Dockerfile
+ repo: https://github.com/logspace-ai/langflow
+ branch: main
+ healthCheckPath: /health
+ autoDeploy: false
diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py
index bfd9e3da5..2384a4089 100644
--- a/src/backend/langflow/api/utils.py
+++ b/src/backend/langflow/api/utils.py
@@ -22,3 +22,38 @@ def remove_api_keys(flow: dict):
value["value"] = None
return flow
+
+
+def build_input_keys_response(langchain_object, artifacts):
+ """Build the input keys response."""
+
+ input_keys_response = {
+ "input_keys": {key: "" for key in langchain_object.input_keys},
+ "memory_keys": [],
+ "handle_keys": artifacts.get("handle_keys", []),
+ }
+
+ # Set the input keys values from artifacts
+ for key, value in artifacts.items():
+ if key in input_keys_response["input_keys"]:
+ input_keys_response["input_keys"][key] = value
+ # If the object has memory, that memory will have a memory_variables attribute
+ # memory variables should be removed from the input keys
+ if hasattr(langchain_object, "memory") and hasattr(
+ langchain_object.memory, "memory_variables"
+ ):
+ # Remove memory variables from input keys
+ input_keys_response["input_keys"] = {
+ key: value
+ for key, value in input_keys_response["input_keys"].items()
+ if key not in langchain_object.memory.memory_variables
+ }
+ # Add memory variables to memory_keys
+ input_keys_response["memory_keys"] = langchain_object.memory.memory_variables
+
+ if hasattr(langchain_object, "prompt") and hasattr(
+ langchain_object.prompt, "template"
+ ):
+ input_keys_response["template"] = langchain_object.prompt.template
+
+ return input_keys_response
diff --git a/src/backend/langflow/api/v1/base.py b/src/backend/langflow/api/v1/base.py
index 6941bedf3..71cac5412 100644
--- a/src/backend/langflow/api/v1/base.py
+++ b/src/backend/langflow/api/v1/base.py
@@ -1,6 +1,8 @@
+from langflow.template.frontend_node.base import FrontendNode
from pydantic import BaseModel, validator
from langflow.interface.utils import extract_input_variables_from_prompt
+from langchain.prompts import PromptTemplate
class CacheResponse(BaseModel):
@@ -11,8 +13,14 @@ class Code(BaseModel):
code: str
-class Prompt(BaseModel):
+class FrontendNodeRequest(FrontendNode):
+ template: dict # type: ignore
+
+
+class ValidatePromptRequest(BaseModel):
+ name: str
template: str
+ frontend_node: FrontendNodeRequest
# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
@@ -31,6 +39,7 @@ class CodeValidationResponse(BaseModel):
class PromptValidationResponse(BaseModel):
input_variables: list
+ frontend_node: FrontendNodeRequest
INVALID_CHARACTERS = {
@@ -51,34 +60,93 @@ INVALID_CHARACTERS = {
"}",
}
+INVALID_NAMES = {
+ "input_variables",
+ "output_parser",
+ "partial_variables",
+ "template",
+ "template_format",
+ "validate_template",
+}
+
def validate_prompt(template: str):
input_variables = extract_input_variables_from_prompt(template)
# Check if there are invalid characters in the input_variables
input_variables = check_input_variables(input_variables)
+ if any(var in INVALID_NAMES for var in input_variables):
+ raise ValueError(
+ f"Invalid input variables. None of the variables can be named {', '.join(input_variables)}. "
+ )
- return PromptValidationResponse(input_variables=input_variables)
+ try:
+ PromptTemplate(template=template, input_variables=input_variables)
+ except Exception as exc:
+ raise ValueError(str(exc)) from exc
+
+ return input_variables
def check_input_variables(input_variables: list):
invalid_chars = []
fixed_variables = []
+ wrong_variables = []
+ empty_variables = []
for variable in input_variables:
new_var = variable
- for char in INVALID_CHARACTERS:
- if char in variable:
- invalid_chars.append(char)
- new_var = new_var.replace(char, "")
+
+        # if the variable is empty, record it in empty_variables
+ if not variable:
+ empty_variables.append(variable)
+ continue
+
+ # if variable starts with a number we should add that to the invalid chars
+ # and wrong variables
+ if variable[0].isdigit():
+ invalid_chars.append(variable[0])
+ new_var = new_var.replace(variable[0], "")
+ wrong_variables.append(variable)
+ else:
+ for char in INVALID_CHARACTERS:
+ if char in variable:
+ invalid_chars.append(char)
+ new_var = new_var.replace(char, "")
+ wrong_variables.append(variable)
fixed_variables.append(new_var)
- if new_var != variable:
- input_variables.remove(variable)
- input_variables.append(new_var)
# If any of the input_variables is not in the fixed_variables, then it means that
# there are invalid characters in the input_variables
- if any(var not in fixed_variables for var in input_variables):
- raise ValueError(
- f"Invalid input variables: {input_variables}. Please, use something like {fixed_variables} instead."
- )
+ if any(var not in fixed_variables for var in input_variables):
+ error_message = build_error_message(
+ input_variables,
+ invalid_chars,
+ wrong_variables,
+ fixed_variables,
+ empty_variables,
+ )
+ raise ValueError(error_message)
return input_variables
+
+
+def build_error_message(
+ input_variables, invalid_chars, wrong_variables, fixed_variables, empty_variables
+):
+ input_variables_str = ", ".join([f"'{var}'" for var in input_variables])
+ error_string = f"Invalid input variables: {input_variables_str}. "
+
+ if wrong_variables and invalid_chars:
+        # Fix each wrong variable by stripping the invalid chars, then match it against the fixed variables
+ error_string_vars = "You can fix them by replacing the invalid characters: "
+ wvars = wrong_variables.copy()
+ for i, wrong_var in enumerate(wvars):
+ for char in invalid_chars:
+ wrong_var = wrong_var.replace(char, "")
+ if wrong_var in fixed_variables:
+ error_string_vars += f"'{wrong_variables[i]}' -> '{wrong_var}'"
+ error_string += error_string_vars
+ elif empty_variables:
+ error_string += f" There are {len(empty_variables)} empty variable{'s' if len(empty_variables) > 1 else ''}."
+ elif len(set(fixed_variables)) != len(fixed_variables):
+ error_string += "There are duplicate variables."
+ return error_string
diff --git a/src/backend/langflow/api/v1/callback.py b/src/backend/langflow/api/v1/callback.py
index b58393d7b..deddde47f 100644
--- a/src/backend/langflow/api/v1/callback.py
+++ b/src/backend/langflow/api/v1/callback.py
@@ -1,22 +1,132 @@
import asyncio
-from typing import Any
from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
from langflow.api.v1.schemas import ChatResponse
+from typing import Any, Dict, List, Union
+from fastapi import WebSocket
+
+
+from langchain.schema import AgentAction, LLMResult, AgentFinish
+from langflow.utils.logger import logger
+
+
# https://github.com/hwchase17/chat-langchain/blob/master/callback.py
class AsyncStreamingLLMCallbackHandler(AsyncCallbackHandler):
"""Callback handler for streaming LLM responses."""
- def __init__(self, websocket):
+ def __init__(self, websocket: WebSocket):
self.websocket = websocket
async def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
resp = ChatResponse(message=token, type="stream", intermediate_steps="")
await self.websocket.send_json(resp.dict())
+ async def on_llm_start(
+ self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
+ ) -> Any:
+ """Run when LLM starts running."""
+
+ async def on_llm_end(self, response: LLMResult, **kwargs: Any) -> Any:
+ """Run when LLM ends running."""
+
+ async def on_llm_error(
+ self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
+ ) -> Any:
+ """Run when LLM errors."""
+
+ async def on_chain_start(
+ self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
+ ) -> Any:
+ """Run when chain starts running."""
+
+ async def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> Any:
+ """Run when chain ends running."""
+
+ async def on_chain_error(
+ self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
+ ) -> Any:
+ """Run when chain errors."""
+
+ async def on_tool_start(
+ self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
+ ) -> Any:
+ """Run when tool starts running."""
+ resp = ChatResponse(
+ message="",
+ type="stream",
+ intermediate_steps=f"Tool input: {input_str}",
+ )
+ await self.websocket.send_json(resp.dict())
+
+ async def on_tool_end(self, output: str, **kwargs: Any) -> Any:
+ """Run when tool ends running."""
+ observation_prefix = kwargs.get("observation_prefix", "Tool output: ")
+ split_output = output.split()
+ first_word = split_output[0]
+ rest_of_output = split_output[1:]
+ # Create a formatted message.
+ intermediate_steps = f"{observation_prefix}{first_word}"
+
+ # Create a ChatResponse instance.
+ resp = ChatResponse(
+ message="",
+ type="stream",
+ intermediate_steps=intermediate_steps,
+ )
+ rest_of_resps = [
+ ChatResponse(
+ message="",
+ type="stream",
+ intermediate_steps=f"{word}",
+ )
+ for word in rest_of_output
+ ]
+ resps = [resp] + rest_of_resps
+ # Try to send the response, handle potential errors.
+
+ try:
+ # This is to emulate the stream of tokens
+ for resp in resps:
+ await self.websocket.send_json(resp.dict())
+ except Exception as e:
+ logger.error(e)
+
+ async def on_tool_error(
+ self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any
+ ) -> Any:
+ """Run when tool errors."""
+
+ async def on_text(self, text: str, **kwargs: Any) -> Any:
+ """Run on arbitrary text."""
+        # Runs when the prompt is first sent to the LLM.
+        # Implementing this callback would forward the final prompt
+        # to the frontend.
+
+ async def on_agent_action(self, action: AgentAction, **kwargs: Any):
+ log = f"Thought: {action.log}"
+ # if there are line breaks, split them and send them
+ # as separate messages
+ if "\n" in log:
+ logs = log.split("\n")
+ for log in logs:
+ resp = ChatResponse(message="", type="stream", intermediate_steps=log)
+ await self.websocket.send_json(resp.dict())
+ else:
+ resp = ChatResponse(message="", type="stream", intermediate_steps=log)
+ await self.websocket.send_json(resp.dict())
+
+ async def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> Any:
+ """Run on agent end."""
+ resp = ChatResponse(
+ message="",
+ type="stream",
+ intermediate_steps=finish.log,
+ )
+ await self.websocket.send_json(resp.dict())
+
class StreamingLLMCallbackHandler(BaseCallbackHandler):
"""Callback handler for streaming LLM responses."""
diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py
index e47c31694..937eb2cf6 100644
--- a/src/backend/langflow/api/v1/chat.py
+++ b/src/backend/langflow/api/v1/chat.py
@@ -1,5 +1,6 @@
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status
from fastapi.responses import StreamingResponse
+from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.chat.manager import ChatManager
@@ -83,6 +84,7 @@ async def stream_build(flow_id: str):
async def event_stream(flow_id):
final_response = {"end_of_stream": True}
+ artifacts = {}
try:
if flow_id not in flow_data_store:
error_message = "Invalid session ID"
@@ -113,13 +115,6 @@ async def stream_build(flow_id: str):
number_of_nodes = len(graph.nodes)
flow_data_store[flow_id]["status"] = BuildStatus.IN_PROGRESS
- # To deal with the ZeroShotAgent case
- # we need to build the root node first
- # and then the rest of the graph
- # This is a big problem because certain nodes require
- # params that are not connected to it.
- # We should consider connecting the tools to the ZeroShotPrompt
- graph.build()
for i, vertex in enumerate(graph.generator_build(), 1):
try:
@@ -131,8 +126,13 @@ async def stream_build(flow_id: str):
params = vertex._built_object_repr()
valid = True
logger.debug(
- f"Building node {params[:50]}{'...' if len(params) > 50 else ''}"
+ f"Building node {str(params)[:50]}{'...' if len(str(params)) > 50 else ''}"
)
+ if vertex.artifacts:
+ # The artifacts will be prompt variables
+ # passed to build_input_keys_response
+ # to set the input_keys values
+ artifacts.update(vertex.artifacts)
except Exception as exc:
params = str(exc)
valid = False
@@ -147,9 +147,26 @@ async def stream_build(flow_id: str):
yield str(StreamData(event="message", data=response))
- chat_manager.set_cache(flow_id, graph.build())
+ langchain_object = graph.build()
+ # Now we need to check the input_keys to send them to the client
+ if hasattr(langchain_object, "input_keys"):
+ input_keys_response = build_input_keys_response(
+ langchain_object, artifacts
+ )
+ else:
+ input_keys_response = {
+ "input_keys": {},
+ "memory_keys": [],
+ "handle_keys": [],
+ }
+ yield str(StreamData(event="message", data=input_keys_response))
+
+ chat_manager.set_cache(flow_id, langchain_object)
+ # We need to reset the chat history
+ chat_manager.chat_history.empty_history(flow_id)
flow_data_store[flow_id]["status"] = BuildStatus.SUCCESS
except Exception as exc:
+ logger.exception(exc)
logger.error("Error while building the flow: %s", exc)
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE
yield str(StreamData(event="error", data={"error": str(exc)}))
diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py
index 22df4a977..e4b9a6e84 100644
--- a/src/backend/langflow/api/v1/schemas.py
+++ b/src/backend/langflow/api/v1/schemas.py
@@ -53,7 +53,7 @@ class ChatMessage(BaseModel):
"""Chat message schema."""
is_bot: bool = False
- message: Union[str, None] = None
+ message: Union[str, None, dict] = None
type: str = "human"
diff --git a/src/backend/langflow/api/v1/validate.py b/src/backend/langflow/api/v1/validate.py
index 959273a00..2a5bdd673 100644
--- a/src/backend/langflow/api/v1/validate.py
+++ b/src/backend/langflow/api/v1/validate.py
@@ -3,10 +3,11 @@ from fastapi import APIRouter, HTTPException
from langflow.api.v1.base import (
Code,
CodeValidationResponse,
- Prompt,
+ ValidatePromptRequest,
PromptValidationResponse,
validate_prompt,
)
+from langflow.template.field.base import TemplateField
from langflow.utils.logger import logger
from langflow.utils.validate import validate_code
@@ -27,9 +28,100 @@ def post_validate_code(code: Code):
@router.post("/prompt", status_code=200, response_model=PromptValidationResponse)
-def post_validate_prompt(prompt: Prompt):
+def post_validate_prompt(prompt_request: ValidatePromptRequest):
try:
- return validate_prompt(prompt.template)
+ input_variables = validate_prompt(prompt_request.template)
+
+ old_custom_fields = get_old_custom_fields(prompt_request)
+
+ add_new_variables_to_template(input_variables, prompt_request)
+
+ remove_old_variables_from_template(
+ old_custom_fields, input_variables, prompt_request
+ )
+
+ update_input_variables_field(input_variables, prompt_request)
+
+ return PromptValidationResponse(
+ input_variables=input_variables,
+ frontend_node=prompt_request.frontend_node,
+ )
except Exception as e:
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
+
+
+def get_old_custom_fields(prompt_request):
+ try:
+ old_custom_fields = prompt_request.frontend_node.custom_fields[
+ prompt_request.name
+ ].copy()
+ except KeyError:
+ old_custom_fields = []
+ prompt_request.frontend_node.custom_fields[prompt_request.name] = []
+ return old_custom_fields
+
+
+def add_new_variables_to_template(input_variables, prompt_request):
+ for variable in input_variables:
+ try:
+ template_field = TemplateField(
+ name=variable,
+ display_name=variable,
+ field_type="str",
+ show=True,
+ advanced=False,
+ multiline=True,
+ input_types=["Document", "BaseOutputParser"],
+ value="", # Set the value to empty string
+ )
+ if variable in prompt_request.frontend_node.template:
+ # Set the new field with the old value
+ template_field.value = prompt_request.frontend_node.template[variable][
+ "value"
+ ]
+
+ prompt_request.frontend_node.template[variable] = template_field.to_dict()
+
+ # Check if variable is not already in the list before appending
+ if (
+ variable
+ not in prompt_request.frontend_node.custom_fields[prompt_request.name]
+ ):
+ prompt_request.frontend_node.custom_fields[prompt_request.name].append(
+ variable
+ )
+
+ except Exception as exc:
+ logger.exception(exc)
+ raise HTTPException(status_code=500, detail=str(exc)) from exc
+
+
+def remove_old_variables_from_template(
+ old_custom_fields, input_variables, prompt_request
+):
+ for variable in old_custom_fields:
+ if variable not in input_variables:
+ try:
+ # Remove the variable from custom_fields associated with the given name
+ if (
+ variable
+ in prompt_request.frontend_node.custom_fields[prompt_request.name]
+ ):
+ prompt_request.frontend_node.custom_fields[
+ prompt_request.name
+ ].remove(variable)
+
+ # Remove the variable from the template
+ prompt_request.frontend_node.template.pop(variable, None)
+
+ except Exception as exc:
+ logger.exception(exc)
+ raise HTTPException(status_code=500, detail=str(exc)) from exc
+
+
+def update_input_variables_field(input_variables, prompt_request):
+ if "input_variables" in prompt_request.frontend_node.template:
+ prompt_request.frontend_node.template["input_variables"][
+ "value"
+ ] = input_variables
diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py
index 4a1b8e77c..33de784b5 100644
--- a/src/backend/langflow/chat/manager.py
+++ b/src/backend/langflow/chat/manager.py
@@ -104,16 +104,22 @@ class ChatManager:
async def close_connection(self, client_id: str, code: int, reason: str):
if websocket := self.active_connections[client_id]:
- await websocket.close(code=code, reason=reason)
- self.disconnect(client_id)
+ try:
+ await websocket.close(code=code, reason=reason)
+ self.disconnect(client_id)
+ except RuntimeError as exc:
+ # This is to catch the following error:
+ # Unexpected ASGI message 'websocket.close', after sending 'websocket.close'
+ if "after sending" in str(exc):
+ logger.error(exc)
async def process_message(
self, client_id: str, payload: Dict, langchain_object: Any
):
# Process the graph data and chat message
- chat_message = payload.pop("message", "")
- chat_message = ChatMessage(message=chat_message)
- self.chat_history.add_message(client_id, chat_message)
+ chat_inputs = payload.pop("inputs", "")
+ chat_inputs = ChatMessage(message=chat_inputs)
+ self.chat_history.add_message(client_id, chat_inputs)
# graph_data = payload
start_resp = ChatResponse(message=None, type="start", intermediate_steps="")
@@ -126,7 +132,7 @@ class ChatManager:
result, intermediate_steps = await process_graph(
langchain_object=langchain_object,
- chat_message=chat_message,
+ chat_inputs=chat_inputs,
websocket=self.active_connections[client_id],
)
except Exception as e:
diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/chat/utils.py
index 2e2ee367f..7db65b8e3 100644
--- a/src/backend/langflow/chat/utils.py
+++ b/src/backend/langflow/chat/utils.py
@@ -7,7 +7,7 @@ from langflow.utils.logger import logger
async def process_graph(
langchain_object,
- chat_message: ChatMessage,
+ chat_inputs: ChatMessage,
websocket: WebSocket,
):
langchain_object = try_setting_streaming_options(langchain_object, websocket)
@@ -21,9 +21,13 @@ async def process_graph(
# Generate result and thought
try:
+ if not chat_inputs.message:
+ logger.debug("No message provided")
+ raise ValueError("No message provided")
+
logger.debug("Generating result and thought")
result, intermediate_steps = await get_result_and_steps(
- langchain_object, chat_message.message or "", websocket=websocket
+ langchain_object, chat_inputs.message, websocket=websocket
)
logger.debug("Generated result and intermediate_steps")
return result, intermediate_steps
diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml
index 3745b9e22..3116e74c7 100644
--- a/src/backend/langflow/config.yaml
+++ b/src/backend/langflow/config.yaml
@@ -151,12 +151,19 @@ memories:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/buffer_window"
VectorStoreRetrieverMemory:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/vectorstore_retriever_memory"
-
+ MongoDBChatMessageHistory:
+ documentation: "https://python.langchain.com/docs/modules/memory/integrations/mongodb_chat_message_history"
prompts:
+ ChatMessagePromptTemplate:
+ documentation: "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/msg_prompt_templates"
+ HumanMessagePromptTemplate:
+ documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts"
+ SystemMessagePromptTemplate:
+ documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts"
+ ChatPromptTemplate:
+ documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/how_to/prompts"
PromptTemplate:
documentation: "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/"
- ZeroShotPrompt:
- documentation: "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent"
textsplitters:
CharacterTextSplitter:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter"
@@ -269,7 +276,17 @@ vectorstores:
SupabaseVectorStore:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase"
MongoDBAtlasVectorSearch:
- documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas_vector_search"
+ documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas"
+ # Requires docarray >=0.32.0 but langchain-serve requires jina 3.15.2 which doesn't support docarray >=0.32.0
+ # DocArrayInMemorySearch:
+ # documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/docarray_in_memory"
wrappers:
RequestsWrapper:
documentation: ""
+ SQLDatabase:
+ documentation: ""
+output_parsers:
+ StructuredOutputParser:
+ documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured"
+ ResponseSchema:
+ documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured"
diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py
index 0f1e44308..58ef1b508 100644
--- a/src/backend/langflow/custom/customs.py
+++ b/src/backend/langflow/custom/customs.py
@@ -2,9 +2,9 @@ from langflow.template import frontend_node
# These should always be instantiated
CUSTOM_NODES = {
- "prompts": {
- "ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(),
- },
+ # "prompts": {
+ # "ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(),
+ # },
"tools": {
"PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(),
"PythonFunction": frontend_node.tools.PythonFunctionNode(),
@@ -23,6 +23,7 @@ CUSTOM_NODES = {
},
"memories": {
"PostgresChatMessageHistory": frontend_node.memories.PostgresChatMessageHistoryFrontendNode(),
+ "MongoDBChatMessageHistory": frontend_node.memories.MongoDBChatMessageHistoryFrontendNode(),
},
"chains": {
"SeriesCharacterChain": frontend_node.chains.SeriesCharacterChainNode(),
diff --git a/src/backend/langflow/graph/edge/base.py b/src/backend/langflow/graph/edge/base.py
index 88a177e40..569d33ec0 100644
--- a/src/backend/langflow/graph/edge/base.py
+++ b/src/backend/langflow/graph/edge/base.py
@@ -6,9 +6,15 @@ if TYPE_CHECKING:
class Edge:
- def __init__(self, source: "Vertex", target: "Vertex"):
+ def __init__(self, source: "Vertex", target: "Vertex", edge: dict):
self.source: "Vertex" = source
self.target: "Vertex" = target
+ self.source_handle = edge.get("sourceHandle", "")
+ self.target_handle = edge.get("targetHandle", "")
+ # 'BaseLoader;BaseOutputParser|documents|PromptTemplate-zmTlD'
+ # target_param is documents
+ self.target_param = self.target_handle.split("|")[1]
+
self.validate_edge()
def validate_edge(self) -> None:
@@ -42,6 +48,16 @@ class Edge:
def __repr__(self) -> str:
return (
- f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
+ f"Edge(source={self.source.id}, target={self.target.id}, target_param={self.target_param}"
f", matched_type={self.matched_type})"
)
+
+ def __hash__(self) -> int:
+ return hash(self.__repr__())
+
+ def __eq__(self, __value: object) -> bool:
+ return (
+ self.__repr__() == __value.__repr__()
+ if isinstance(__value, Edge)
+ else False
+ )
diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py
index 46425ddf6..86a2f98a9 100644
--- a/src/backend/langflow/graph/graph/base.py
+++ b/src/backend/langflow/graph/graph/base.py
@@ -179,7 +179,7 @@ class Graph:
raise ValueError(f"Source node {edge['source']} not found")
if target is None:
raise ValueError(f"Target node {edge['target']} not found")
- edges.append(Edge(source, target))
+ edges.append(Edge(source, target, edge))
return edges
def _get_vertex_class(self, node_type: str, node_lc_type: str) -> Type[Vertex]:
@@ -214,3 +214,10 @@ class Graph:
if node_type in node_types:
children.append(node)
return children
+
+ def __repr__(self):
+ node_ids = [node.id for node in self.nodes]
+ edges_repr = "\n".join(
+ [f"{edge.source.id} --> {edge.target.id}" for edge in self.edges]
+ )
+ return f"Graph:\nNodes: {node_ids}\nConnections:\n{edges_repr}"
diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py
index 9474e311e..a2fd287eb 100644
--- a/src/backend/langflow/graph/graph/constants.py
+++ b/src/backend/langflow/graph/graph/constants.py
@@ -12,6 +12,7 @@ from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
+from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
from typing import Dict, Type
@@ -30,5 +31,6 @@ VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
**{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()},
**{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()},
**{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()},
+ **{t: types.OutputParserVertex for t in output_parser_creator.to_list()},
**{t: types.RetrieverVertex for t in retriever_creator.to_list()},
}
diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py
index 275a9e080..4eb39e8e8 100644
--- a/src/backend/langflow/graph/vertex/base.py
+++ b/src/backend/langflow/graph/vertex/base.py
@@ -1,14 +1,12 @@
-from langflow.utils.constants import DIRECT_TYPES
from langflow.interface.initialize import loading
from langflow.interface.listing import ALL_TYPES_DICT
+from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async
-import contextlib
import inspect
import types
-import warnings
from typing import Any, Dict, List, Optional
from typing import TYPE_CHECKING
@@ -25,6 +23,7 @@ class Vertex:
self._parse_data()
self._built_object = None
self._built = False
+ self.artifacts: Dict[str, Any] = {}
def _parse_data(self) -> None:
self.data = self._data["data"]
@@ -45,6 +44,14 @@ class Vertex:
for key, value in template_dicts.items()
if not value["required"]
]
+ # Add the template_dicts[key]["input_types"] to the optional_inputs
+ self.optional_inputs.extend(
+ [
+ input_type
+ for value in template_dicts.values()
+ for input_type in value.get("input_types", [])
+ ]
+ )
template_dict = self.data["node"]["template"]
self.vertex_type = (
@@ -60,6 +67,7 @@ class Vertex:
break
def _build_params(self):
+ # sourcery skip: merge-list-append, remove-redundant-if
# Some params are required, some are optional
# but most importantly, some params are python base classes
# like str and others are LangChain objects like LLMChain, BasePromptTemplate
@@ -80,8 +88,19 @@ class Vertex:
if isinstance(value, dict)
}
params = {}
+
+ for edge in self.edges:
+ param_key = edge.target_param
+ if param_key in template_dict:
+ if template_dict[param_key]["list"]:
+ if param_key not in params:
+ params[param_key] = []
+ params[param_key].append(edge.source)
+ elif edge.target.id == self.id:
+ params[param_key] = edge.source
+
for key, value in template_dict.items():
- if key == "_type":
+ if key == "_type" or not value.get("show"):
continue
# If the type is not transformable to a python base class
# then we need to get the edge that connects to this node
@@ -92,124 +111,131 @@ class Vertex:
file_path = value.get("file_path")
params[key] = file_path
+ elif value.get("type") in DIRECT_TYPES and params.get(key) is None:
+ params[key] = value.get("value")
- elif value.get("type") not in DIRECT_TYPES:
- # Get the edge that connects to this node
- edges = [
- edge
- for edge in self.edges
- if edge.target == self and edge.matched_type in value["type"]
- ]
-
- # Get the output of the node that the edge connects to
- # if the value['list'] is True, then there will be more
- # than one time setting to params[key]
- # so we need to append to a list if it exists
- # or create a new list if it doesn't
-
- if value["required"] and not edges:
- # If a required parameter is not found, raise an error
- raise ValueError(
- f"Required input {key} for module {self.vertex_type} not found"
- )
- elif value["list"]:
- # If this is a list parameter, append all sources to a list
- params[key] = [edge.source for edge in edges]
- elif edges:
- # If a single parameter is found, use its source
- params[key] = edges[0].source
-
- elif value["required"] or value.get("value"):
- # If value does not have value this still passes
- # but then gives a keyError
- # so we need to check if value has value
- new_value = value.get("value")
- if new_value is None:
- warnings.warn(f"Value for {key} in {self.vertex_type} is None. ")
- if value.get("type") == "int":
- with contextlib.suppress(TypeError, ValueError):
- new_value = int(new_value) # type: ignore
- params[key] = new_value
-
+ if not value.get("required") and params.get(key) is None:
+ if value.get("default"):
+ params[key] = value.get("default")
+ else:
+ params.pop(key, None)
# Add _type to params
self.params = params
def _build(self):
- # The params dict is used to build the module
- # it contains values and keys that point to nodes which
- # have their own params dict
- # When build is called, we iterate through the params dict
- # and if the value is a node, we call build on that node
- # and use the output of that build as the value for the param
- # if the value is not a node, then we use the value as the param
- # and continue
- # Another aspect is that the node_type is the class that we need to import
- # and instantiate with these built params
+ """
+ Initiate the build process.
+ """
logger.debug(f"Building {self.vertex_type}")
- # Build each node in the params dict
+ self._build_each_node_in_params_dict()
+ self._get_and_instantiate_class()
+ self._validate_built_object()
+
+ self._built = True
+
+ def _build_each_node_in_params_dict(self):
+ """
+ Iterates over each node in the params dictionary and builds it.
+ """
for key, value in self.params.copy().items():
- # Check if Node or list of Nodes and not self
- # to avoid recursion
- if isinstance(value, Vertex):
+ if self._is_node(value):
if value == self:
del self.params[key]
continue
- result = value.build()
- # If the key is "func", then we need to use the run method
- if key == "func":
- if not isinstance(result, types.FunctionType):
- # func can be
- # PythonFunction(code='\ndef upper_case(text: str) -> str:\n return text.upper()\n')
- # so we need to check if there is an attribute called run
- if hasattr(result, "run"):
- result = result.run # type: ignore
- elif hasattr(result, "get_function"):
- result = result.get_function() # type: ignore
- elif inspect.iscoroutinefunction(result):
- self.params["coroutine"] = result
- else:
- # turn result which is a function into a coroutine
- # so that it can be awaited
- self.params["coroutine"] = sync_to_async(result)
- if isinstance(result, list):
- # If the result is a list, then we need to extend the list
- # with the result but first check if the key exists
- # if it doesn't, then we need to create a new list
- if isinstance(self.params[key], list):
- self.params[key].extend(result)
+ self._build_node_and_update_params(key, value)
+ elif isinstance(value, list) and self._is_list_of_nodes(value):
+ self._build_list_of_nodes_and_update_params(key, value)
- self.params[key] = result
- elif isinstance(value, list) and all(
- isinstance(node, Vertex) for node in value
- ):
- self.params[key] = []
- for node in value:
- built = node.build()
- if isinstance(built, list):
- self.params[key].extend(built)
- else:
- self.params[key].append(built)
+ def _is_node(self, value):
+ """
+ Checks if the provided value is an instance of Vertex.
+ """
+ return isinstance(value, Vertex)
- # Get the class from LANGCHAIN_TYPES_DICT
- # and instantiate it with the params
- # and return the instance
+ def _is_list_of_nodes(self, value):
+ """
+ Checks if the provided value is a list of Vertex instances.
+ """
+ return all(self._is_node(node) for node in value)
+ def _build_node_and_update_params(self, key, node):
+ """
+ Builds a given node and updates the params dictionary accordingly.
+ """
+ result = node.build()
+ self._handle_func(key, result)
+ if isinstance(result, list):
+ self._extend_params_list_with_result(key, result)
+ self.params[key] = result
+
+ def _build_list_of_nodes_and_update_params(self, key, nodes):
+ """
+ Iterates over a list of nodes, builds each and updates the params dictionary.
+ """
+ self.params[key] = []
+ for node in nodes:
+ built = node.build()
+ if isinstance(built, list):
+ self.params[key].extend(built)
+ else:
+ self.params[key].append(built)
+
+ def _handle_func(self, key, result):
+ """
+ Handles 'func' key by checking if the result is a function and setting it as coroutine.
+ """
+ if key == "func":
+ if not isinstance(result, types.FunctionType):
+ if hasattr(result, "run"):
+ result = result.run # type: ignore
+ elif hasattr(result, "get_function"):
+ result = result.get_function() # type: ignore
+ elif inspect.iscoroutinefunction(result):
+ self.params["coroutine"] = result
+ else:
+ self.params["coroutine"] = sync_to_async(result)
+
+ def _extend_params_list_with_result(self, key, result):
+ """
+ Extends a list in the params dictionary with the given result if it exists.
+ """
+ if isinstance(self.params[key], list):
+ self.params[key].extend(result)
+
+ def _get_and_instantiate_class(self):
+ """
+ Gets the class from a dictionary and instantiates it with the params.
+ """
+ if self.base_type is None:
+ raise ValueError(f"Base type for node {self.vertex_type} not found")
try:
- self._built_object = loading.instantiate_class(
+ result = loading.instantiate_class(
node_type=self.vertex_type,
base_type=self.base_type,
params=self.params,
)
+ self._update_built_object_and_artifacts(result)
except Exception as exc:
raise ValueError(
f"Error building node {self.vertex_type}: {str(exc)}"
) from exc
+ def _update_built_object_and_artifacts(self, result):
+ """
+ Updates the built object and its artifacts.
+ """
+ if isinstance(result, tuple):
+ self._built_object, self.artifacts = result
+ else:
+ self._built_object = result
+
+ def _validate_built_object(self):
+ """
+ Checks if the built object is None and raises a ValueError if so.
+ """
if self._built_object is None:
raise ValueError(f"Node type {self.vertex_type} not found")
- self._built = True
-
def build(self, force: bool = False) -> Any:
if not self._built or force:
self._build()
@@ -217,7 +243,8 @@ class Vertex:
return self._built_object
def add_edge(self, edge: "Edge") -> None:
- self.edges.append(edge)
+ if edge not in self.edges:
+ self.edges.append(edge)
def __repr__(self) -> str:
return f"Vertex(id={self.id}, data={self.data})"
@@ -229,4 +256,5 @@ class Vertex:
return id(self)
def _built_object_repr(self):
- return repr(self._built_object)
+ # Add a message with an emoji: stars for success,
+ return "Built successfully ✨" if self._built_object else "Failed to build 😵💫"
diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py
index 846b79bc0..effd00071 100644
--- a/src/backend/langflow/graph/vertex/types.py
+++ b/src/backend/langflow/graph/vertex/types.py
@@ -1,3 +1,4 @@
+import ast
from typing import Any, Dict, List, Optional, Union
from langflow.graph.vertex.base import Vertex
@@ -79,7 +80,7 @@ class WrapperVertex(Vertex):
def build(self, force: bool = False) -> Any:
if not self._built or force:
if "headers" in self.params:
- self.params["headers"] = eval(self.params["headers"])
+ self.params["headers"] = ast.literal_eval(self.params["headers"])
self._build()
return self._built_object
@@ -91,8 +92,13 @@ class DocumentLoaderVertex(Vertex):
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
+
if self._built_object:
+ avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(
+ self._built_object
+ )
return f"""{self.vertex_type}({len(self._built_object)} documents)
+ \nAvg. Document Length (characters): {avg_length}
Documents: {self._built_object[:3]}..."""
return f"{self.vertex_type}()"
@@ -124,8 +130,13 @@ class TextSplitterVertex(Vertex):
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
+
if self._built_object:
+ avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(
+ self._built_object
+ )
return f"""{self.vertex_type}({len(self._built_object)} documents)
+ \nAvg. Document Length (characters): {avg_length}
\nDocuments: {self._built_object[:3]}..."""
return f"{self.vertex_type}()"
@@ -185,11 +196,46 @@ class PromptVertex(Vertex):
]
else:
prompt_params = ["template"]
- for param in prompt_params:
- prompt_text = self.params[param]
- variables = extract_input_variables_from_prompt(prompt_text)
- self.params["input_variables"].extend(variables)
- self.params["input_variables"] = list(set(self.params["input_variables"]))
+
+ if "prompt" not in self.params and "messages" not in self.params:
+ for param in prompt_params:
+ prompt_text = self.params[param]
+ variables = extract_input_variables_from_prompt(prompt_text)
+ self.params["input_variables"].extend(variables)
+ self.params["input_variables"] = list(
+ set(self.params["input_variables"])
+ )
+ else:
+ self.params.pop("input_variables", None)
self._build()
return self._built_object
+
+ def _built_object_repr(self):
+ if (
+ not self.artifacts
+ or self._built_object is None
+ or not hasattr(self._built_object, "format")
+ ):
+ return super()._built_object_repr()
+ # We'll build the prompt with the artifacts
+ # to show the user what the prompt looks like
+ # with the variables filled in
+ artifacts = self.artifacts.copy()
+ # Remove the handle_keys from the artifacts
+ # so the prompt format doesn't break
+ artifacts.pop("handle_keys", None)
+ try:
+ template = self._built_object.format(**artifacts)
+ return (
+ template
+ if isinstance(template, str)
+ else f"{self.vertex_type}({template})"
+ )
+ except KeyError:
+ return str(self._built_object)
+
+
+class OutputParserVertex(Vertex):
+ def __init__(self, data: Dict):
+ super().__init__(data, base_type="output_parsers")
diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/langflow/interface/agents/base.py
index d404f845d..b272144bc 100644
--- a/src/backend/langflow/interface/agents/base.py
+++ b/src/backend/langflow/interface/agents/base.py
@@ -6,13 +6,20 @@ from langflow.custom.customs import get_custom_nodes
from langflow.interface.agents.custom import CUSTOM_AGENTS
from langflow.interface.base import LangChainTypeCreator
from langflow.settings import settings
+from langflow.template.frontend_node.agents import AgentFrontendNode
from langflow.utils.logger import logger
-from langflow.utils.util import build_template_from_class
+from langflow.utils.util import build_template_from_class, build_template_from_method
class AgentCreator(LangChainTypeCreator):
type_name: str = "agents"
+ from_method_nodes = {"ZeroShotAgent": "from_llm_and_tools"}
+
+ @property
+ def frontend_node_class(self) -> type[AgentFrontendNode]:
+ return AgentFrontendNode
+
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
@@ -27,6 +34,13 @@ class AgentCreator(LangChainTypeCreator):
try:
if name in get_custom_nodes(self.type_name).keys():
return get_custom_nodes(self.type_name)[name]
+ elif name in self.from_method_nodes:
+ return build_template_from_method(
+ name,
+ type_to_cls_dict=self.type_to_loader_dict,
+ add_function=True,
+ method_name=self.from_method_nodes[name],
+ )
return build_template_from_class(
name, self.type_to_loader_dict, add_function=True
)
diff --git a/src/backend/langflow/interface/agents/custom.py b/src/backend/langflow/interface/agents/custom.py
index aa0cfb5db..b4e2b9bac 100644
--- a/src/backend/langflow/interface/agents/custom.py
+++ b/src/backend/langflow/interface/agents/custom.py
@@ -157,7 +157,7 @@ class VectorStoreAgent(CustomAgentExecutor):
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
- agent=agent, tools=tools, verbose=True
+ agent=agent, tools=tools, verbose=True, handle_parsing_errors=True
)
def run(self, *args, **kwargs):
@@ -232,6 +232,7 @@ class SQLAgent(CustomAgentExecutor):
verbose=True,
max_iterations=15,
early_stopping_method="force",
+ handle_parsing_errors=True,
)
def run(self, *args, **kwargs):
@@ -276,7 +277,7 @@ class VectorStoreRouterAgent(CustomAgentExecutor):
llm_chain=llm_chain, allowed_tools=tool_names, **kwargs # type: ignore
)
return AgentExecutor.from_agent_and_tools(
- agent=agent, tools=tools, verbose=True
+ agent=agent, tools=tools, verbose=True, handle_parsing_errors=True
)
def run(self, *args, **kwargs):
@@ -308,6 +309,7 @@ class InitializeAgent(CustomAgentExecutor):
agent=agent, # type: ignore
memory=memory,
return_intermediate_steps=True,
+ handle_parsing_errors=True,
)
def __init__(self, *args, **kwargs):
diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py
index ff7e1ee33..67d31308f 100644
--- a/src/backend/langflow/interface/chains/base.py
+++ b/src/backend/langflow/interface/chains/base.py
@@ -23,6 +23,7 @@ class ChainCreator(LangChainTypeCreator):
from_method_nodes = {
"ConversationalRetrievalChain": "from_llm",
"LLMCheckerChain": "from_llm",
+ "SQLDatabaseChain": "from_llm",
}
@property
diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py
index 7ef916ca9..ccfd8d5dd 100644
--- a/src/backend/langflow/interface/importing/utils.py
+++ b/src/backend/langflow/interface/importing/utils.py
@@ -10,6 +10,7 @@ from langchain.chains.base import Chain
from langchain.chat_models.base import BaseChatModel
from langchain.tools import BaseTool
from langflow.utils import validate
+from langflow.interface.wrappers.base import wrapper_creator
def import_module(module_path: str) -> Any:
@@ -44,6 +45,7 @@ def import_by_type(_type: str, name: str) -> Any:
"documentloaders": import_documentloader,
"textsplitters": import_textsplitter,
"utilities": import_utility,
+ "output_parsers": import_output_parser,
"retrievers": import_retriever,
}
if _type == "llms":
@@ -55,6 +57,11 @@ def import_by_type(_type: str, name: str) -> Any:
return loaded_func(name)
+def import_output_parser(output_parser: str) -> Any:
+ """Import output parser from output parser name"""
+ return import_module(f"from langchain.output_parsers import {output_parser}")
+
+
def import_chat_llm(llm: str) -> BaseChatModel:
"""Import chat llm from llm name"""
return import_class(f"langchain.chat_models.{llm}")
@@ -90,7 +97,11 @@ def import_prompt(prompt: str) -> Type[PromptTemplate]:
def import_wrapper(wrapper: str) -> Any:
"""Import wrapper from wrapper name"""
- return import_module(f"from langchain.requests import {wrapper}")
+ if (
+ isinstance(wrapper_creator.type_dict, dict)
+ and wrapper in wrapper_creator.type_dict
+ ):
+ return wrapper_creator.type_dict.get(wrapper)
def import_toolkit(toolkit: str) -> Any:
diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py
index 9c618b1cd..b232d089c 100644
--- a/src/backend/langflow/interface/initialize/loading.py
+++ b/src/backend/langflow/interface/initialize/loading.py
@@ -1,5 +1,6 @@
+import contextlib
import json
-from typing import Any, Callable, Dict, Sequence, Type
+from typing import Any, Callable, Dict, List, Sequence, Type
from langchain.agents import ZeroShotAgent
from langchain.agents import agent as agent_module
@@ -10,19 +11,22 @@ from langflow.interface.initialize.llm import initialize_vertexai
from langflow.interface.initialize.vector_store import vecstore_initializer
+from langchain.schema import Document, BaseOutputParser
from pydantic import ValidationError
from langflow.interface.custom_lists import CUSTOM_NODES
from langflow.interface.importing.utils import get_function, import_by_type
+from langflow.interface.agents.base import agent_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
+from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
+from langflow.interface.wrappers.base import wrapper_creator
from langflow.interface.utils import load_file_into_dict
from langflow.utils import validate
from langchain.chains.base import Chain
from langchain.vectorstores.base import VectorStore
from langchain.document_loaders.base import BaseLoader
-from langchain.prompts.base import BasePromptTemplate
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
@@ -60,11 +64,15 @@ def convert_kwargs(params):
def instantiate_based_on_type(class_object, base_type, node_type, params):
if base_type == "agents":
- return instantiate_agent(class_object, params)
+ return instantiate_agent(node_type, class_object, params)
elif base_type == "prompts":
return instantiate_prompt(node_type, class_object, params)
elif base_type == "tools":
- return instantiate_tool(node_type, class_object, params)
+ tool = instantiate_tool(node_type, class_object, params)
+ if hasattr(tool, "name") and isinstance(tool, BaseTool):
+ # tool name shouldn't contain spaces
+ tool.name = tool.name.replace(" ", "_")
+ return tool
elif base_type == "toolkits":
return instantiate_toolkit(node_type, class_object, params)
elif base_type == "embeddings":
@@ -79,26 +87,63 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
return instantiate_utility(node_type, class_object, params)
elif base_type == "chains":
return instantiate_chains(node_type, class_object, params)
+ elif base_type == "output_parsers":
+ return instantiate_output_parser(node_type, class_object, params)
elif base_type == "llms":
return instantiate_llm(node_type, class_object, params)
elif base_type == "retrievers":
return instantiate_retriever(node_type, class_object, params)
elif base_type == "memory":
return instantiate_memory(node_type, class_object, params)
+ elif base_type == "wrappers":
+ return instantiate_wrapper(node_type, class_object, params)
else:
return class_object(**params)
+def instantiate_wrapper(node_type, class_object, params):
+ if node_type in wrapper_creator.from_method_nodes:
+ method = wrapper_creator.from_method_nodes[node_type]
+ if class_method := getattr(class_object, method, None):
+ return class_method(**params)
+ raise ValueError(f"Method {method} not found in {class_object}")
+ return class_object(**params)
+
+
+def instantiate_output_parser(node_type, class_object, params):
+ if node_type in output_parser_creator.from_method_nodes:
+ method = output_parser_creator.from_method_nodes[node_type]
+ if class_method := getattr(class_object, method, None):
+ return class_method(**params)
+ raise ValueError(f"Method {method} not found in {class_object}")
+ return class_object(**params)
+
+
def instantiate_llm(node_type, class_object, params: Dict):
# This is a workaround so JinaChat works until streaming is implemented
# if "openai_api_base" in params and "jina" in params["openai_api_base"]:
# False if condition is True
if node_type == "VertexAI":
return initialize_vertexai(class_object=class_object, params=params)
+ # max_tokens sometimes is a string and should be an int
+ if "max_tokens" in params:
+ if isinstance(params["max_tokens"], str) and params["max_tokens"].isdigit():
+ params["max_tokens"] = int(params["max_tokens"])
+ elif not isinstance(params.get("max_tokens"), int):
+ params.pop("max_tokens", None)
return class_object(**params)
def instantiate_memory(node_type, class_object, params):
+ # process input_key and output_key to remove them if
+ # they are empty strings
+ if node_type == "ConversationEntityMemory":
+ params.pop("memory_key", None)
+
+ for key in ["input_key", "output_key"]:
+ if key in params and (params[key] == "" or not params[key]):
+ params.pop(key)
+
try:
if "retriever" in params and hasattr(params["retriever"], "as_retriever"):
params["retriever"] = params["retriever"].as_retriever()
@@ -141,26 +186,94 @@ def instantiate_chains(node_type, class_object: Type[Chain], params: Dict):
return class_object(**params)
-def instantiate_agent(class_object: Type[agent_module.Agent], params: Dict):
+def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: Dict):
+ if node_type in agent_creator.from_method_nodes:
+ method = agent_creator.from_method_nodes[node_type]
+ if class_method := getattr(class_object, method, None):
+ agent = class_method(**params)
+ tools = params.get("tools", [])
+ return AgentExecutor.from_agent_and_tools(
+ agent=agent, tools=tools, handle_parsing_errors=True
+ )
return load_agent_executor(class_object, params)
-def instantiate_prompt(node_type, class_object: Type[BasePromptTemplate], params: Dict):
+def instantiate_prompt(node_type, class_object, params: Dict):
if node_type == "ZeroShotPrompt":
if "tools" not in params:
params["tools"] = []
return ZeroShotAgent.create_prompt(**params)
- return class_object(**params)
+ elif "MessagePromptTemplate" in node_type:
+ # Then we only need the template
+ from_template_params = {
+ "template": params.pop("prompt", params.pop("template", ""))
+ }
+
+ if not from_template_params.get("template"):
+ raise ValueError("Prompt template is required")
+ prompt = class_object.from_template(**from_template_params)
+
+ elif node_type == "ChatPromptTemplate":
+ prompt = class_object.from_messages(**params)
+ else:
+ prompt = class_object(**params)
+
+ format_kwargs: Dict[str, Any] = {}
+ for input_variable in prompt.input_variables:
+ if input_variable in params:
+ variable = params[input_variable]
+ if isinstance(variable, str):
+ format_kwargs[input_variable] = variable
+ elif isinstance(variable, BaseOutputParser) and hasattr(
+ variable, "get_format_instructions"
+ ):
+ format_kwargs[input_variable] = variable.get_format_instructions()
+ elif isinstance(variable, List) and all(
+ isinstance(item, Document) for item in variable
+ ):
+ # Format document to contain page_content and metadata
+ # as one string separated by a newline
+ if len(variable) > 1:
+ content = "\n".join(
+ [item.page_content for item in variable if item.page_content]
+ )
+ else:
+ content = variable[0].page_content
+ # content could be a json list of strings
+ with contextlib.suppress(json.JSONDecodeError):
+ content = json.loads(content)
+ if isinstance(content, list):
+ content = ",".join([str(item) for item in content])
+ format_kwargs[input_variable] = content
+ # handle_keys will be a list but it does not exist yet
+ # so we need to create it
+
+ if (
+ isinstance(variable, List)
+ and all(isinstance(item, Document) for item in variable)
+ ) or (
+ isinstance(variable, BaseOutputParser)
+ and hasattr(variable, "get_format_instructions")
+ ):
+ if "handle_keys" not in format_kwargs:
+ format_kwargs["handle_keys"] = []
+
+ # Add the handle_keys to the list
+ format_kwargs["handle_keys"].append(input_variable)
+
+ return prompt, format_kwargs
def instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict):
if node_type == "JsonSpec":
- params["dict_"] = load_file_into_dict(params.pop("path"))
+ if file_dict := load_file_into_dict(params.pop("path")):
+ params["dict_"] = file_dict
+ else:
+ raise ValueError("Invalid file")
return class_object(**params)
elif node_type == "PythonFunctionTool":
params["func"] = get_function(params.get("code"))
return class_object(**params)
- # For backward compatibility
elif node_type == "PythonFunction":
function_string = params["code"]
if isinstance(function_string, str):
@@ -218,7 +331,7 @@ def instantiate_documentloader(class_object: Type[BaseLoader], params: Dict):
# like lambda x: x.endswith(".txt") but as we don't know
# anything besides the string, we will simply check if the string is
# in x and if it is, we will return True
- file_filter = params.pop("file_filter", None)
+ file_filter = params.pop("file_filter")
extensions = file_filter.split(",")
params["file_filter"] = lambda x: any(
extension.strip() in x for extension in extensions
@@ -260,6 +373,12 @@ def instantiate_textsplitter(
"separator_type" in params and params["separator_type"] == "Text"
) or "separator_type" not in params:
params.pop("separator_type", None)
+ # separators might come in as an escaped string like \\n
+ # so we need to convert it to a string
+ if "separators" in params:
+ params["separators"] = (
+ params["separators"].encode().decode("unicode-escape")
+ )
text_splitter = class_object(**params)
else:
from langchain.text_splitter import Language
@@ -312,6 +431,7 @@ def load_agent_executor(agent_class: type[agent_module.Agent], params, **kwargs)
return AgentExecutor.from_agent_and_tools(
agent=agent,
tools=allowed_tools,
+ handle_parsing_errors=True,
# memory=memory,
**kwargs,
)
diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py
index 1e1421d32..0893f855a 100644
--- a/src/backend/langflow/interface/listing.py
+++ b/src/backend/langflow/interface/listing.py
@@ -11,6 +11,7 @@ from langflow.interface.tools.base import tool_creator
from langflow.interface.utilities.base import utility_creator
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
+from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
@@ -29,6 +30,7 @@ def get_type_dict():
"embeddings": embedding_creator.to_list(),
"textSplitters": textsplitter_creator.to_list(),
"utilities": utility_creator.to_list(),
+ "outputParsers": output_parser_creator.to_list(),
"retrievers": retriever_creator.to_list(),
}
diff --git a/src/backend/langflow/interface/output_parsers/__init__.py b/src/backend/langflow/interface/output_parsers/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/src/backend/langflow/interface/output_parsers/base.py b/src/backend/langflow/interface/output_parsers/base.py
new file mode 100644
index 000000000..79cbdd98c
--- /dev/null
+++ b/src/backend/langflow/interface/output_parsers/base.py
@@ -0,0 +1,64 @@
+from typing import Dict, List, Optional, Type
+
+from langchain import output_parsers
+
+from langflow.interface.base import LangChainTypeCreator
+from langflow.interface.importing.utils import import_class
+from langflow.settings import settings
+from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode
+from langflow.utils.logger import logger
+from langflow.utils.util import build_template_from_class, build_template_from_method
+
+
+class OutputParserCreator(LangChainTypeCreator):
+ type_name: str = "output_parsers"
+ from_method_nodes = {
+ "StructuredOutputParser": "from_response_schemas",
+ }
+
+ @property
+ def frontend_node_class(self) -> Type[OutputParserFrontendNode]:
+ return OutputParserFrontendNode
+
+ @property
+ def type_to_loader_dict(self) -> Dict:
+ if self.type_dict is None:
+ self.type_dict = {
+ output_parser_name: import_class(
+ f"langchain.output_parsers.{output_parser_name}"
+ )
+ # if output_parser_name is not lower case it is a class
+ for output_parser_name in output_parsers.__all__
+ }
+ self.type_dict = {
+ name: output_parser
+ for name, output_parser in self.type_dict.items()
+ if name in settings.output_parsers or settings.dev
+ }
+ return self.type_dict
+
+ def get_signature(self, name: str) -> Optional[Dict]:
+ try:
+ if name in self.from_method_nodes:
+ return build_template_from_method(
+ name,
+ type_to_cls_dict=self.type_to_loader_dict,
+ method_name=self.from_method_nodes[name],
+ )
+ else:
+ return build_template_from_class(
+ name,
+ type_to_cls_dict=self.type_to_loader_dict,
+ )
+ except ValueError as exc:
+ # raise ValueError("OutputParser not found") from exc
+ logger.error(f"OutputParser {name} not found: {exc}")
+ except AttributeError as exc:
+ logger.error(f"OutputParser {name} not loaded: {exc}")
+ return None
+
+ def to_list(self) -> List[str]:
+ return list(self.type_to_loader_dict.keys())
+
+
+output_parser_creator = OutputParserCreator()
diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py
index ff888487f..97f47334e 100644
--- a/src/backend/langflow/interface/run.py
+++ b/src/backend/langflow/interface/run.py
@@ -14,6 +14,23 @@ def build_langchain_object_with_caching(data_graph):
return graph.build()
+@memoize_dict(maxsize=10)
+def build_sorted_vertices_with_caching(data_graph):
+ """
+ Build langchain object from data_graph.
+ """
+
+ logger.debug("Building langchain object")
+ graph = Graph.from_payload(data_graph)
+ sorted_vertices = graph.topological_sort()
+ artifacts = {}
+ for vertex in sorted_vertices:
+ vertex.build()
+ if vertex.artifacts:
+ artifacts.update(vertex.artifacts)
+ return graph.build(), artifacts
+
+
def build_langchain_object(data_graph):
"""
Build langchain object from data_graph.
diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py
index d6b114e4c..027224a3a 100644
--- a/src/backend/langflow/interface/tools/base.py
+++ b/src/backend/langflow/interface/tools/base.py
@@ -90,7 +90,7 @@ class ToolCreator(LangChainTypeCreator):
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a tool."""
- base_classes = ["Tool"]
+ base_classes = ["Tool", "BaseTool"]
fields = []
params = []
tool_params = {}
diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py
index 6b1ecb3e2..72ecb6775 100644
--- a/src/backend/langflow/interface/types.py
+++ b/src/backend/langflow/interface/types.py
@@ -11,6 +11,7 @@ from langflow.interface.tools.base import tool_creator
from langflow.interface.utilities.base import utility_creator
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
+from langflow.interface.output_parsers.base import output_parser_creator
from langflow.interface.retrievers.base import retriever_creator
@@ -45,6 +46,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union
documentloader_creator,
textsplitter_creator,
utility_creator,
+ output_parser_creator,
retriever_creator,
]
diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py
index 1ab2b4ce5..9203915cf 100644
--- a/src/backend/langflow/interface/utils.py
+++ b/src/backend/langflow/interface/utils.py
@@ -16,17 +16,15 @@ def load_file_into_dict(file_path: str) -> dict:
if not os.path.exists(file_path):
raise FileNotFoundError(f"File not found: {file_path}")
- file_extension = os.path.splitext(file_path)[1].lower()
-
- if file_extension == ".json":
- with open(file_path, "r") as json_file:
- data = json.load(json_file)
- elif file_extension in [".yaml", ".yml"]:
- with open(file_path, "r") as yaml_file:
- data = yaml.safe_load(yaml_file)
- else:
- raise ValueError("Unsupported file type. Please provide a JSON or YAML file.")
-
+ # File names are UUIDs, so we can't find the extension
+ with open(file_path, "r") as file:
+ try:
+ data = json.load(file)
+ except json.JSONDecodeError:
+ file.seek(0)
+ data = yaml.safe_load(file)
+ except ValueError as exc:
+ raise ValueError("Invalid file type. Expected .json or .yaml.") from exc
return data
diff --git a/src/backend/langflow/interface/wrappers/base.py b/src/backend/langflow/interface/wrappers/base.py
index f5773d07a..77e38f921 100644
--- a/src/backend/langflow/interface/wrappers/base.py
+++ b/src/backend/langflow/interface/wrappers/base.py
@@ -1,25 +1,36 @@
from typing import Dict, List, Optional
-from langchain import requests
+from langchain import requests, sql_database
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.logger import logger
-from langflow.utils.util import build_template_from_class
+from langflow.utils.util import build_template_from_class, build_template_from_method
class WrapperCreator(LangChainTypeCreator):
type_name: str = "wrappers"
+ from_method_nodes = {"SQLDatabase": "from_uri"}
+
@property
def type_to_loader_dict(self) -> Dict:
if self.type_dict is None:
self.type_dict = {
- wrapper.__name__: wrapper for wrapper in [requests.TextRequestsWrapper]
+ wrapper.__name__: wrapper
+ for wrapper in [requests.TextRequestsWrapper, sql_database.SQLDatabase]
}
return self.type_dict
def get_signature(self, name: str) -> Optional[Dict]:
try:
+ if name in self.from_method_nodes:
+ return build_template_from_method(
+ name,
+ type_to_cls_dict=self.type_to_loader_dict,
+ add_function=True,
+ method_name=self.from_method_nodes[name],
+ )
+
return build_template_from_class(name, self.type_to_loader_dict)
except ValueError as exc:
raise ValueError("Wrapper not found") from exc
diff --git a/src/backend/langflow/lcserve.py b/src/backend/langflow/lcserve.py
index affc0ff79..87f69e014 100644
--- a/src/backend/langflow/lcserve.py
+++ b/src/backend/langflow/lcserve.py
@@ -3,9 +3,9 @@
import os
# Use the JCLOUD_WORKSPACE for db URL if it's provided by JCloud.
-if 'JCLOUD_WORKSPACE' in os.environ:
+if "JCLOUD_WORKSPACE" in os.environ:
os.environ[
- 'LANGFLOW_DATABASE_URL'
+ "LANGFLOW_DATABASE_URL"
] = f"sqlite:///{os.environ['JCLOUD_WORKSPACE']}/langflow.db"
from langflow.main import setup_app
diff --git a/src/backend/langflow/processing/base.py b/src/backend/langflow/processing/base.py
index b332eac7c..f8690bbdf 100644
--- a/src/backend/langflow/processing/base.py
+++ b/src/backend/langflow/processing/base.py
@@ -1,3 +1,4 @@
+from typing import Union
from langflow.api.v1.callback import (
AsyncStreamingLLMCallbackHandler,
StreamingLLMCallbackHandler,
@@ -6,39 +7,31 @@ from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.utils.logger import logger
-async def get_result_and_steps(langchain_object, message: str, **kwargs):
+async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwargs):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
- chat_input = None
- memory_key = ""
- if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
- memory_key = langchain_object.memory.memory_key
-
- if hasattr(langchain_object, "input_keys"):
- for key in langchain_object.input_keys:
- if key not in [memory_key, "chat_history"]:
- chat_input = {key: message}
- else:
- chat_input = message # type: ignore
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = True
+ try:
+ fix_memory_inputs(langchain_object)
+ except Exception as exc:
+ logger.error(exc)
- fix_memory_inputs(langchain_object)
try:
async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)]
- output = await langchain_object.acall(chat_input, callbacks=async_callbacks)
+ output = await langchain_object.acall(inputs, callbacks=async_callbacks)
except Exception as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
sync_callbacks = [StreamingLLMCallbackHandler(**kwargs)]
- output = langchain_object(chat_input, callbacks=sync_callbacks)
+ output = langchain_object(inputs, callbacks=sync_callbacks)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
@@ -49,7 +42,11 @@ async def get_result_and_steps(langchain_object, message: str, **kwargs):
if isinstance(output, dict)
else output
)
- thought = format_actions(intermediate_steps) if intermediate_steps else ""
+ try:
+ thought = format_actions(intermediate_steps) if intermediate_steps else ""
+ except Exception as exc:
+ logger.exception(exc)
+ thought = ""
except Exception as exc:
logger.exception(exc)
raise ValueError(f"Error: {str(exc)}") from exc
diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py
index e36cbfd8b..03e6e4c35 100644
--- a/src/backend/langflow/processing/process.py
+++ b/src/backend/langflow/processing/process.py
@@ -2,7 +2,7 @@ from pathlib import Path
from langchain.schema import AgentAction
import json
from langflow.interface.run import (
- build_langchain_object_with_caching,
+ build_sorted_vertices_with_caching,
get_memory_key,
update_memory_keys,
)
@@ -22,7 +22,10 @@ def fix_memory_inputs(langchain_object):
if not hasattr(langchain_object, "memory") or langchain_object.memory is None:
return
try:
- if langchain_object.memory.memory_key in langchain_object.input_variables:
+ if (
+ hasattr(langchain_object.memory, "memory_key")
+ and langchain_object.memory.memory_key in langchain_object.input_variables
+ ):
return
except AttributeError:
input_variables = (
@@ -88,8 +91,20 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = No
with PromptTemplate,then run the graph and return the result and thought.
"""
# Load langchain object
- langchain_object = build_langchain_object_with_caching(data_graph)
+ langchain_object, artifacts = build_sorted_vertices_with_caching(data_graph)
logger.debug("Loaded LangChain object")
+ if inputs is None:
+ inputs = {}
+
+ # Add artifacts to inputs
+ # artifacts can be documents loaded when building
+ # the flow
+ for (
+ key,
+ value,
+ ) in artifacts.items():
+ if key not in inputs or not inputs[key]:
+ inputs[key] = value
if langchain_object is None:
# Raise user facing error
@@ -105,8 +120,7 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = No
result = get_result_and_thought(langchain_object, inputs)
logger.debug("Generated result and thought")
elif isinstance(langchain_object, VectorStore):
- class_name = langchain_object.__class__.__name__
- result = {"message": f"Processed {class_name} successfully"}
+ result = langchain_object.search(**inputs)
else:
raise ValueError(
f"Unknown langchain_object type: {type(langchain_object).__name__}"
@@ -115,23 +129,23 @@ def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = No
def load_flow_from_json(
- input: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
+ flow: Union[Path, str, dict], tweaks: Optional[dict] = None, build=True
):
"""
Load flow from a JSON file or a JSON object.
- :param input: JSON file path or JSON object
+ :param flow: JSON file path or JSON object
:param tweaks: Optional tweaks to be processed
:param build: If True, build the graph, otherwise return the graph object
:return: Langchain object or Graph object depending on the build parameter
"""
# If input is a file path, load JSON from the file
- if isinstance(input, (str, Path)):
- with open(input, "r", encoding="utf-8") as f:
+ if isinstance(flow, (str, Path)):
+ with open(flow, "r", encoding="utf-8") as f:
flow_graph = json.load(f)
# If input is a dictionary, assume it's a JSON object
- elif isinstance(input, dict):
- flow_graph = input
+ elif isinstance(flow, dict):
+ flow_graph = flow
else:
raise TypeError(
"Input must be either a file path (str) or a JSON object (dict)"
diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py
index df00a2c27..9e6c60082 100644
--- a/src/backend/langflow/settings.py
+++ b/src/backend/langflow/settings.py
@@ -21,6 +21,7 @@ class Settings(BaseSettings):
toolkits: dict = {}
textsplitters: dict = {}
utilities: dict = {}
+ output_parsers: dict = {}
dev: bool = False
database_url: Optional[str] = None
cache: str = "InMemoryCache"
@@ -66,7 +67,7 @@ class Settings(BaseSettings):
self.vectorstores = new_settings.vectorstores or {}
self.documentloaders = new_settings.documentloaders or {}
self.retrievers = new_settings.retrievers or {}
-
+ self.output_parsers = new_settings.output_parsers or {}
self.dev = dev
def update_settings(self, **kwargs):
diff --git a/src/backend/langflow/template/field/base.py b/src/backend/langflow/template/field/base.py
index fdfdca562..a747ad322 100644
--- a/src/backend/langflow/template/field/base.py
+++ b/src/backend/langflow/template/field/base.py
@@ -21,6 +21,7 @@ class TemplateFieldCreator(BaseModel, ABC):
name: str = ""
display_name: Optional[str] = None
advanced: bool = False
+ input_types: list[str] = []
info: Optional[str] = ""
def to_dict(self):
diff --git a/src/backend/langflow/template/frontend_node/agents.py b/src/backend/langflow/template/frontend_node/agents.py
index f692a7d6c..02aea78b9 100644
--- a/src/backend/langflow/template/frontend_node/agents.py
+++ b/src/backend/langflow/template/frontend_node/agents.py
@@ -13,6 +13,16 @@ NON_CHAT_AGENTS = {
}
+class AgentFrontendNode(FrontendNode):
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ if field.name in ["suffix", "prefix"]:
+ field.show = True
+ if field.name == "Tools" and name == "ZeroShotAgent":
+ field.field_type = "BaseTool"
+ field.is_list = True
+
+
class SQLAgentNode(FrontendNode):
name: str = "SQLAgent"
template: Template = Template(
diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py
index de8c78112..7dae45463 100644
--- a/src/backend/langflow/template/frontend_node/base.py
+++ b/src/backend/langflow/template/frontend_node/base.py
@@ -1,15 +1,16 @@
+from collections import defaultdict
import re
from typing import List, Optional
from pydantic import BaseModel, Field
+from langflow.template.frontend_node.formatter import field_formatters
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
from langflow.template.field.base import TemplateField
from langflow.template.template.base import Template
from langflow.utils import constants
-from langflow.template.frontend_node.formatter import field_formatters
-CLASSES_TO_REMOVE = ["Serializable", "BaseModel"]
+CLASSES_TO_REMOVE = ["Serializable", "BaseModel", "object"]
class FieldFormatters(BaseModel):
@@ -47,6 +48,8 @@ class FrontendNode(BaseModel):
name: str = ""
display_name: str = ""
documentation: str = ""
+ custom_fields: defaultdict = defaultdict(list)
+ output_types: List[str] = []
field_formatters: FieldFormatters = Field(default_factory=FieldFormatters)
def process_base_classes(self) -> None:
@@ -76,6 +79,8 @@ class FrontendNode(BaseModel):
"description": self.description,
"base_classes": self.base_classes,
"display_name": self.display_name or self.name,
+ "custom_fields": self.custom_fields,
+ "output_types": self.output_types,
"documentation": self.documentation,
},
}
diff --git a/src/backend/langflow/template/frontend_node/chains.py b/src/backend/langflow/template/frontend_node/chains.py
index ce8c1c62c..b678dec3b 100644
--- a/src/backend/langflow/template/frontend_node/chains.py
+++ b/src/backend/langflow/template/frontend_node/chains.py
@@ -81,7 +81,7 @@ class ChainFrontendNode(FrontendNode):
field.advanced = False
if field.name == "verbose":
field.required = False
- field.show = True
+ field.show = False
field.advanced = True
if field.name == "llm":
field.required = True
diff --git a/src/backend/langflow/template/frontend_node/constants.py b/src/backend/langflow/template/frontend_node/constants.py
index 90cdbf280..295995586 100644
--- a/src/backend/langflow/template/frontend_node/constants.py
+++ b/src/backend/langflow/template/frontend_node/constants.py
@@ -58,3 +58,7 @@ The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.
You can change this to use other APIs like JinaChat, LocalAI and Prem.
"""
+
+
+INPUT_KEY_INFO = """The variable to be used as Chat Input when more than one variable is available."""
+OUTPUT_KEY_INFO = """The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)"""
diff --git a/src/backend/langflow/template/frontend_node/documentloaders.py b/src/backend/langflow/template/frontend_node/documentloaders.py
index 35a727870..d775d8736 100644
--- a/src/backend/langflow/template/frontend_node/documentloaders.py
+++ b/src/backend/langflow/template/frontend_node/documentloaders.py
@@ -19,6 +19,10 @@ def build_file_field(
class DocumentLoaderFrontNode(FrontendNode):
+ def add_extra_base_classes(self) -> None:
+ self.base_classes = ["Document"]
+ self.output_types = ["Document"]
+
file_path_templates = {
"AirbyteJSONLoader": build_file_field(suffixes=[".json"], fileTypes=["json"]),
"CoNLLULoader": build_file_field(suffixes=[".csv"], fileTypes=["csv"]),
diff --git a/src/backend/langflow/template/frontend_node/formatter/base.py b/src/backend/langflow/template/frontend_node/formatter/base.py
index 67e906593..f582bc298 100644
--- a/src/backend/langflow/template/frontend_node/formatter/base.py
+++ b/src/backend/langflow/template/frontend_node/formatter/base.py
@@ -2,9 +2,10 @@ from abc import ABC, abstractmethod
from typing import Optional
from langflow.template.field.base import TemplateField
+from pydantic import BaseModel
-class FieldFormatter(ABC):
+class FieldFormatter(BaseModel, ABC):
@abstractmethod
def format(self, field: TemplateField, name: Optional[str]) -> None:
pass
diff --git a/src/backend/langflow/template/frontend_node/memories.py b/src/backend/langflow/template/frontend_node/memories.py
index 6d490212f..374d36ff0 100644
--- a/src/backend/langflow/template/frontend_node/memories.py
+++ b/src/backend/langflow/template/frontend_node/memories.py
@@ -2,8 +2,13 @@ from typing import Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
+from langflow.template.frontend_node.constants import INPUT_KEY_INFO, OUTPUT_KEY_INFO
from langflow.template.template.base import Template
from langchain.memory.chat_message_histories.postgres import DEFAULT_CONNECTION_STRING
+from langchain.memory.chat_message_histories.mongodb import (
+ DEFAULT_COLLECTION_NAME,
+ DEFAULT_DBNAME,
+)
class MemoryFrontendNode(FrontendNode):
@@ -66,11 +71,15 @@ class MemoryFrontendNode(FrontendNode):
field.required = False
field.show = True
field.advanced = False
- if field.name in ["input_key", "output_key"]:
+ if field.name in {"input_key", "output_key"}:
field.required = False
field.show = True
field.advanced = False
field.value = ""
+ field.info = (
+ INPUT_KEY_INFO if field.name == "input_key" else OUTPUT_KEY_INFO
+ )
+
if field.name == "memory_key":
field.value = "chat_history"
if field.name == "chat_memory":
@@ -80,9 +89,10 @@ class MemoryFrontendNode(FrontendNode):
if field.name == "url":
field.show = True
if field.name == "entity_store":
- field.show = True
- if name == "SQLiteEntityStore":
- field.show = True
+ field.show = False
+ if name == "ConversationEntityMemory" and field.name == "memory_key":
+ field.show = False
+ field.required = False
class PostgresChatMessageHistoryFrontendNode(MemoryFrontendNode):
@@ -120,3 +130,56 @@ class PostgresChatMessageHistoryFrontendNode(MemoryFrontendNode):
)
description: str = "Memory store with Postgres"
base_classes: list[str] = ["PostgresChatMessageHistory", "BaseChatMessageHistory"]
+
+
+class MongoDBChatMessageHistoryFrontendNode(MemoryFrontendNode):
+ name: str = "MongoDBChatMessageHistory"
+ template: Template = Template(
+ # langchain/memory/chat_message_histories/mongodb.py
+ # connection_string: str,
+ # session_id: str,
+ # database_name: str = DEFAULT_DBNAME,
+ # collection_name: str = DEFAULT_COLLECTION_NAME,
+ type_name="MongoDBChatMessageHistory",
+ fields=[
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=False,
+ name="session_id",
+ ),
+ TemplateField(
+ field_type="str",
+ required=True,
+ show=True,
+ name="connection_string",
+ value="",
+ info="MongoDB connection string (e.g mongodb://mongo_user:password123@mongo:27017)",
+ ),
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=False,
+ value=DEFAULT_DBNAME,
+ name="database_name",
+ ),
+ TemplateField(
+ field_type="str",
+ required=True,
+ placeholder="",
+ is_list=False,
+ show=True,
+ multiline=False,
+ value=DEFAULT_COLLECTION_NAME,
+ name="collection_name",
+ ),
+ ],
+ )
+ description: str = "Memory store with MongoDB"
+ base_classes: list[str] = ["MongoDBChatMessageHistory", "BaseChatMessageHistory"]
diff --git a/src/backend/langflow/template/frontend_node/output_parsers.py b/src/backend/langflow/template/frontend_node/output_parsers.py
new file mode 100644
index 000000000..e9b4d3706
--- /dev/null
+++ b/src/backend/langflow/template/frontend_node/output_parsers.py
@@ -0,0 +1,10 @@
+from typing import Optional
+from langflow.template.field.base import TemplateField
+from langflow.template.frontend_node.base import FrontendNode
+
+
+class OutputParserFrontendNode(FrontendNode):
+ @staticmethod
+ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
+ FrontendNode.format_field(field, name)
+ field.show = True
diff --git a/src/backend/langflow/template/frontend_node/textsplitters.py b/src/backend/langflow/template/frontend_node/textsplitters.py
index 65a2be6f6..1d5549042 100644
--- a/src/backend/langflow/template/frontend_node/textsplitters.py
+++ b/src/backend/langflow/template/frontend_node/textsplitters.py
@@ -4,10 +4,14 @@ from langchain.text_splitter import Language
class TextSplittersFrontendNode(FrontendNode):
+ def add_extra_base_classes(self) -> None:
+ self.base_classes = ["Document"]
+ self.output_types = ["Document"]
+
def add_extra_fields(self) -> None:
self.template.add_field(
TemplateField(
- field_type="BaseLoader",
+ field_type="Document",
required=True,
show=True,
name="documents",
@@ -41,7 +45,7 @@ class TextSplittersFrontendNode(FrontendNode):
field_type="str",
required=True,
show=True,
- value=".",
+ value="\\n",
name=name,
display_name="Separator",
)
diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py
index fa3942bd2..ece765ed7 100644
--- a/src/backend/langflow/template/frontend_node/tools.py
+++ b/src/backend/langflow/template/frontend_node/tools.py
@@ -53,7 +53,7 @@ class ToolNode(FrontendNode):
],
)
description: str = "Converts a chain, agent or function into a tool."
- base_classes: list[str] = ["Tool"]
+ base_classes: list[str] = ["Tool", "BaseTool"]
def to_dict(self):
return super().to_dict()
@@ -109,7 +109,7 @@ class PythonFunctionToolNode(FrontendNode):
],
)
description: str = "Python function to be executed."
- base_classes: list[str] = ["Tool"]
+ base_classes: list[str] = ["BaseTool", "Tool"]
def to_dict(self):
return super().to_dict()
diff --git a/src/backend/langflow/template/frontend_node/vectorstores.py b/src/backend/langflow/template/frontend_node/vectorstores.py
index 89eedc596..4daf56bd2 100644
--- a/src/backend/langflow/template/frontend_node/vectorstores.py
+++ b/src/backend/langflow/template/frontend_node/vectorstores.py
@@ -254,7 +254,7 @@ class VectorStoreFrontendNode(FrontendNode):
# when instantiating the vectorstores
field.name = "documents"
- field.field_type = "TextSplitter"
+ field.field_type = "Document"
field.display_name = "Documents"
field.required = False
field.show = True
diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py
index 4769563bd..c5db6052e 100644
--- a/src/backend/langflow/utils/util.py
+++ b/src/backend/langflow/utils/util.py
@@ -243,7 +243,11 @@ def format_dict(d, name: Optional[str] = None):
# Check for list type
if "List" in _type or "Sequence" in _type or "Set" in _type:
- _type = _type.replace("List[", "")[:-1]
+ _type = (
+ _type.replace("List[", "")
+ .replace("Sequence[", "")
+ .replace("Set[", "")[:-1]
+ )
value["list"] = True
else:
value["list"] = False
diff --git a/src/frontend/index.html b/src/frontend/index.html
index 50bdae647..8c21f0124 100644
--- a/src/frontend/index.html
+++ b/src/frontend/index.html
@@ -5,6 +5,7 @@
+
- {title} -
+{title}