Merge branch 'release' into bugfix-duplicate

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-06-27 13:15:49 -03:00 committed by GitHub
commit ae5f04a1b0
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
27 changed files with 624 additions and 392 deletions

View file

@ -13,8 +13,9 @@
<img alt="Github License" src="https://img.shields.io/github/license/logspace-ai/langflow" />
</p>
<p>
<a href="https://discord.gg/FUhJnnJ9"><img alt="Discord Server" src="https://dcbadge.vercel.app/api/server/FUhJnnJ9?compact=true&style=flat"/></a>
<a href="https://discord.gg/EqksyE2EX9"><img alt="Discord Server" src="https://dcbadge.vercel.app/api/server/EqksyE2EX9?compact=true&style=flat"/></a>
<a href="https://huggingface.co/spaces/Logspace/LangFlow"><img src="https://huggingface.co/datasets/huggingface/badges/raw/main/open-in-hf-spaces-sm.svg" alt="HuggingFace Spaces"></a>
</p>

138
poetry.lock generated
View file

@ -296,14 +296,14 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte
[[package]]
name = "authlib"
version = "1.2.0"
version = "1.2.1"
description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"},
{file = "Authlib-1.2.0.tar.gz", hash = "sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d"},
{file = "Authlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:c88984ea00149a90e3537c964327da930779afa4564e354edfd98410bea01911"},
{file = "Authlib-1.2.1.tar.gz", hash = "sha256:421f7c6b468d907ca2d9afede256f068f87e34d23dd221c07d13d4c234726afb"},
]
[package.dependencies]
@ -1102,13 +1102,13 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"]
[[package]]
name = "docarray"
version = "0.21.0"
version = "0.21.1"
description = "The data structure for unstructured data"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "docarray-0.21.0.tar.gz", hash = "sha256:3c9f605123800c1b0cdf8c458be3fb19c05e9a81f723e51200ef531b02e689ee"},
{file = "docarray-0.21.1.tar.gz", hash = "sha256:3a478707465c263147a98fe32feb0b6eb215c0ed58852135ac86acb7eae5cecb"},
]
[package.dependencies]
@ -2580,14 +2580,14 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "keyring"
version = "24.1.0"
version = "24.2.0"
description = "Store and access your passwords safely."
category = "main"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "keyring-24.1.0-py3-none-any.whl", hash = "sha256:ade5e1e7710a7579d7c01e64a712926270239aba48055b1cdc6c022dd6d789b5"},
{file = "keyring-24.1.0.tar.gz", hash = "sha256:bd48a36612ef55505bf70e563528e3e66ba93267e344b6780cf6151f9c1eda6d"},
{file = "keyring-24.2.0-py3-none-any.whl", hash = "sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6"},
{file = "keyring-24.2.0.tar.gz", hash = "sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509"},
]
[package.dependencies]
@ -2604,14 +2604,14 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)",
[[package]]
name = "langchain"
version = "0.0.211"
version = "0.0.215"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain-0.0.211-py3-none-any.whl", hash = "sha256:527b602466d68e5c4e82c550cb6e218d7fe0e1b3d37f97beddc13785a75dbf96"},
{file = "langchain-0.0.211.tar.gz", hash = "sha256:35b43d4492ef3de67b6ea0168f12a489029cae0f5c4031d5c907764168b177cb"},
{file = "langchain-0.0.215-py3-none-any.whl", hash = "sha256:af9587c2eb317a6e33123f8a4ee8ccd8685cfab62359ea4fec52c962d9646acf"},
{file = "langchain-0.0.215.tar.gz", hash = "sha256:a6b261f3be941eeac2d9b37fbf8996fa4279ef724f064e8c90813046126da85b"},
]
[package.dependencies]
@ -3199,44 +3199,44 @@ dill = ">=0.3.6"
[[package]]
name = "mypy"
version = "1.4.0"
version = "1.4.1"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "mypy-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3af348e0925a59213244f28c7c0c3a2c2088b4ba2fe9d6c8d4fbb0aba0b7d05"},
{file = "mypy-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0b2e0da7ff9dd8d2066d093d35a169305fc4e38db378281fce096768a3dbdbf"},
{file = "mypy-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210fe0f39ec5be45dd9d0de253cb79245f0a6f27631d62e0c9c7988be7152965"},
{file = "mypy-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f7a5971490fd4a5a436e143105a1f78fa8b3fe95b30fff2a77542b4f3227a01f"},
{file = "mypy-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:50f65f0e9985f1e50040e603baebab83efed9eb37e15a22a4246fa7cd660f981"},
{file = "mypy-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1b5c875fcf3e7217a3de7f708166f641ca154b589664c44a6fd6d9f17d9e7e"},
{file = "mypy-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4c734d947e761c7ceb1f09a98359dd5666460acbc39f7d0a6b6beec373c5840"},
{file = "mypy-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5984a8d13d35624e3b235a793c814433d810acba9eeefe665cdfed3d08bc3af"},
{file = "mypy-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0f98973e39e4a98709546a9afd82e1ffcc50c6ec9ce6f7870f33ebbf0bd4f26d"},
{file = "mypy-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:19d42b08c7532d736a7e0fb29525855e355fa51fd6aef4f9bbc80749ff64b1a2"},
{file = "mypy-1.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ba9a69172abaa73910643744d3848877d6aac4a20c41742027dcfd8d78f05d9"},
{file = "mypy-1.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a34eed094c16cad0f6b0d889811592c7a9b7acf10d10a7356349e325d8704b4f"},
{file = "mypy-1.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:53c2a1fed81e05ded10a4557fe12bae05b9ecf9153f162c662a71d924d504135"},
{file = "mypy-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bba57b4d2328740749f676807fcf3036e9de723530781405cc5a5e41fc6e20de"},
{file = "mypy-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:653863c75f0dbb687d92eb0d4bd9fe7047d096987ecac93bb7b1bc336de48ebd"},
{file = "mypy-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7461469e163f87a087a5e7aa224102a30f037c11a096a0ceeb721cb0dce274c8"},
{file = "mypy-1.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf0ca95e4b8adeaf07815a78b4096b65adf64ea7871b39a2116c19497fcd0dd"},
{file = "mypy-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94a81b9354545123feb1a99b960faeff9e1fa204fce47e0042335b473d71530d"},
{file = "mypy-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:67242d5b28ed0fa88edd8f880aed24da481929467fdbca6487167cb5e3fd31ff"},
{file = "mypy-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f2b353eebef669529d9bd5ae3566905a685ae98b3af3aad7476d0d519714758"},
{file = "mypy-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62bf18d97c6b089f77f0067b4e321db089d8520cdeefc6ae3ec0f873621c22e5"},
{file = "mypy-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca33ab70a4aaa75bb01086a0b04f0ba8441e51e06fc57e28585176b08cad533b"},
{file = "mypy-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5a0ee54c2cb0f957f8a6f41794d68f1a7e32b9968675ade5846f538504856d42"},
{file = "mypy-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c34d43e3d54ad05024576aef28081d9d0580f6fa7f131255f54020eb12f5352"},
{file = "mypy-1.4.0-py3-none-any.whl", hash = "sha256:f051ca656be0c179c735a4c3193f307d34c92fdc4908d44fd4516fbe8b10567d"},
{file = "mypy-1.4.0.tar.gz", hash = "sha256:de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042"},
{file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"},
{file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"},
{file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"},
{file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"},
{file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"},
{file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"},
{file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"},
{file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"},
{file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"},
{file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"},
{file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"},
{file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"},
{file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"},
{file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"},
{file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"},
{file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"},
{file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"},
{file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"},
{file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"},
{file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"},
{file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"},
{file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"},
{file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"},
{file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"},
{file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"},
{file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10"
typing-extensions = ">=4.1.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
@ -5129,14 +5129,14 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""}
[[package]]
name = "qdrant-client"
version = "1.3.0"
version = "1.3.1"
description = "Client library for the Qdrant vector search engine"
category = "main"
optional = false
python-versions = ">=3.7,<3.12"
files = [
{file = "qdrant_client-1.3.0-py3-none-any.whl", hash = "sha256:f5ab40e24dd31d919475f9a1d7823b1eff2f4df8b4af1812abf19e2789b7021e"},
{file = "qdrant_client-1.3.0.tar.gz", hash = "sha256:68168e9b69af7c49ea5f9e90e027ae1944d2a1ee1ea6701315e0ba8a2fdc4a63"},
{file = "qdrant_client-1.3.1-py3-none-any.whl", hash = "sha256:9640855585d1f532094e342f07e0f2ef00652a60fc5d903c92ca3989a1e86318"},
{file = "qdrant_client-1.3.1.tar.gz", hash = "sha256:a999358b10e611d71b4b04c6ded36a6cfc963e56b4c3f99d9c1a603ca524a82e"},
]
[package.dependencies]
@ -5445,41 +5445,39 @@ tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy (
[[package]]
name = "scipy"
version = "1.10.1"
version = "1.11.0"
description = "Fundamental algorithms for scientific computing in Python"
category = "main"
optional = false
python-versions = "<3.12,>=3.8"
python-versions = "<3.13,>=3.9"
files = [
{file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"},
{file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"},
{file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"},
{file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"},
{file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"},
{file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"},
{file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"},
{file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"},
{file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"},
{file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"},
{file = "scipy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35"},
{file = "scipy-1.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88"},
{file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1"},
{file = "scipy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f"},
{file = "scipy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415"},
{file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"},
{file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"},
{file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"},
{file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"},
{file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"},
{file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"},
{file = "scipy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2e4f14c11fbf825319dbd7f467639a241e7c956c34edb1e036ec7bb6271e4f7b"},
{file = "scipy-1.11.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b269ed44e2e2e43611f2ae95ba551fd98abbdc1a7ea8268f72f75876982368c4"},
{file = "scipy-1.11.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c29bae479b17d85208dfdfc67e50d5944ee23211f236728aadde9b0b7c1c33e"},
{file = "scipy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a53f9cebcfda6158c241c35a559407a4ef6b8cb0863eb4144958fe0a0b7c3dae"},
{file = "scipy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ebf4b2ea26d50312731ddba2406389c5ddcbff9d777cf3277ea11decc81e5dfb"},
{file = "scipy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:894ced9a2cdb050ff5e392f274617af46dca896d5c9112fa4a2019929554d321"},
{file = "scipy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7a92bd3cd4acad2e0e0b360176d5ec68b100983c8145add8a8233acddf4e5fcc"},
{file = "scipy-1.11.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:586608ea35206257d4e0ce6f154a6cfef71723b2c1f6d40de5e0b0e8a81cd2ff"},
{file = "scipy-1.11.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e631c3c49c24f30828580b8126fe3be5cca5409dad5b797418a5b8965eeafa"},
{file = "scipy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc70892ea674f93183c5c4139557b611e42f644dd755da4b19ca974ab770672"},
{file = "scipy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80015b8928f91bd40377b2b1010ba2e09b03680cbfc291208740494aeb8debf2"},
{file = "scipy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:6302c7cba5bf99c901653ff158746625526cc438f058bce41514d7469b79b2c3"},
{file = "scipy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c61ea63124da6a3cff38126426912cc86420898b4902a9bc5e5b6524547a6dcb"},
{file = "scipy-1.11.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:684d44607eacd5dd367c7a9e76e922523fa9c0a7f2379a4d0fc4d70d751464cc"},
{file = "scipy-1.11.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c9c160d117fe71cd2a12ef21cce8e0475ade2fd97c761ef327b9839089bd16"},
{file = "scipy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83867a63515c4e3fce3272d81200dda614d70f4c3a22f047d84021bfe83d7929"},
{file = "scipy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6666a1e31b2123a077f0dc7ab1053e36479cfd457fb9f5c367e7198505c6607a"},
{file = "scipy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:fad4006248513528e0c496de295a9f4d2b65086cc0e388f748e7dbf49fa12760"},
{file = "scipy-1.11.0.tar.gz", hash = "sha256:f9b0248cb9d08eead44cde47cbf6339f1e9aa0dfde28f5fb27950743e317bd5d"},
]
[package.dependencies]
numpy = ">=1.19.5,<1.27.0"
numpy = ">=1.21.6,<1.28.0"
[package.extras]
dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"]
doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"]
dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"]
doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"]
test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
[[package]]
@ -7117,4 +7115,4 @@ deploy = ["langchain-serve"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
content-hash = "e7b18762564269aaf9c6b046fa151f5e3acf85444f38653503341f658155e70a"
content-hash = "2f8373e1c80ce345f39ed9247fd4759ae94b5c754c4e850d3aa72183556eb92b"

View file

@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.1.7"
version = "0.2.2"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
@ -30,7 +30,7 @@ google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^20.1.0"
langchain = "^0.0.211"
langchain = "^0.0.215"
openai = "^0.27.8"
types-pyyaml = "^6.0.12.8"
pandas = "^1.5.3"
@ -72,7 +72,7 @@ pymongo = "^4.4.0"
certifi = "^2023.5.7"
[tool.poetry.group.dev.dependencies]
[tool.poetry.dev-dependencies]
black = "^23.1.0"
ipykernel = "^6.21.2"
mypy = "^1.1.1"

View file

@ -1,6 +1,5 @@
import sys
import time
from fastapi import FastAPI
import httpx
from multiprocess import Process, cpu_count # type: ignore
import platform
@ -11,9 +10,7 @@ from rich.panel import Panel
from rich import box
from rich import print as rprint
import typer
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from langflow.main import create_app
from langflow.main import setup_app
from langflow.settings import settings
from langflow.utils.logger import configure, logger
import webbrowser
@ -30,6 +27,7 @@ def get_number_of_workers(workers=None):
def update_settings(
config: str,
cache: str,
dev: bool = False,
database_url: Optional[str] = None,
remove_api_keys: bool = False,
@ -41,6 +39,8 @@ def update_settings(
settings.update_settings(database_url=database_url)
if remove_api_keys:
settings.update_settings(remove_api_keys=remove_api_keys)
if cache:
settings.update_settings(cache=cache)
def serve_on_jcloud():
@ -102,6 +102,11 @@ def serve(
),
log_level: str = typer.Option("critical", help="Logging level."),
log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file."),
cache: str = typer.Argument(
envvar="LANGCHAIN_CACHE",
help="Type of cache to use. (InMemoryCache, SQLiteCache)",
default="SQLiteCache",
),
jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"),
dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
database_url: str = typer.Option(
@ -130,17 +135,15 @@ def serve(
configure(log_level=log_level, log_file=log_file)
update_settings(
config, dev=dev, database_url=database_url, remove_api_keys=remove_api_keys
config,
dev=dev,
database_url=database_url,
remove_api_keys=remove_api_keys,
cache=cache,
)
# get the directory of the current file
if not path:
frontend_path = Path(__file__).parent
static_files_dir = frontend_path / "frontend"
else:
static_files_dir = Path(path)
app = create_app()
setup_static_files(app, static_files_dir)
# create path object if path is provided
static_files_dir: Optional[Path] = Path(path) if path else None
app = setup_app(static_files_dir=static_files_dir)
# check if port is being used
if is_port_in_use(port, host):
port = get_free_port(port)
@ -188,29 +191,6 @@ def run_on_windows(host, port, log_level, options, app):
run_langflow(host, port, log_level, options, app)
def setup_static_files(app: FastAPI, static_files_dir: Path):
"""
Setup the static files directory.
Args:
app (FastAPI): FastAPI app.
path (str): Path to the static files directory.
"""
app.mount(
"/",
StaticFiles(directory=static_files_dir, html=True),
name="static",
)
@app.exception_handler(404)
async def custom_404_handler(request, __):
path = static_files_dir / "index.html"
if not path.exists():
raise RuntimeError(f"File at path {path} does not exist.")
return FileResponse(path)
def is_port_in_use(port, host="localhost"):
"""
Check if a port is in use.

View file

@ -0,0 +1,2 @@
class ChatConfig:
streaming: bool = True

View file

@ -209,3 +209,5 @@ class ChatManager:
except Exception as e:
logger.error(e)
self.disconnect(client_id)

View file

@ -1,141 +1,247 @@
---
agents:
- ZeroShotAgent
- JsonAgent
- CSVAgent
- AgentInitializer
- VectorStoreAgent
- VectorStoreRouterAgent
- SQLAgent
ZeroShotAgent:
documentation: "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent"
JsonAgent:
documentation: "https://python.langchain.com/docs/modules/agents/toolkits/openapi"
CSVAgent:
documentation: "https://python.langchain.com/docs/modules/agents/toolkits/csv"
AgentInitializer:
documentation: "https://python.langchain.com/docs/modules/agents/agent_types/"
VectorStoreAgent:
documentation: ""
VectorStoreRouterAgent:
documentation: ""
SQLAgent:
documentation: ""
chains:
- LLMChain
- LLMMathChain
- LLMCheckerChain
- ConversationChain
- SeriesCharacterChain
- MidJourneyPromptChain
- TimeTravelGuideChain
- SQLDatabaseChain
- RetrievalQA
- RetrievalQAWithSourcesChain
- ConversationalRetrievalChain
- CombineDocsChain
LLMChain:
documentation: "https://python.langchain.com/docs/modules/chains/foundational/llm_chain"
LLMMathChain:
documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_math"
LLMCheckerChain:
documentation: "https://python.langchain.com/docs/modules/chains/additional/llm_checker"
ConversationChain:
documentation: ""
SeriesCharacterChain:
documentation: ""
MidJourneyPromptChain:
documentation: ""
TimeTravelGuideChain:
documentation: ""
SQLDatabaseChain:
documentation: ""
RetrievalQA:
documentation: "https://python.langchain.com/docs/modules/chains/popular/vector_db_qa"
RetrievalQAWithSourcesChain:
documentation: ""
ConversationalRetrievalChain:
documentation: "https://python.langchain.com/docs/modules/chains/popular/chat_vector_db"
CombineDocsChain:
documentation: ""
documentloaders:
- AirbyteJSONLoader
- CoNLLULoader
- CSVLoader
- UnstructuredEmailLoader
- EverNoteLoader
- FacebookChatLoader
- GutenbergLoader
- BSHTMLLoader
- UnstructuredHTMLLoader
# - UnstructuredImageLoader # Issue with Python 3.11 (https://github.com/Unstructured-IO/unstructured-inference/issues/83)
- UnstructuredMarkdownLoader
- PyPDFLoader
- UnstructuredPowerPointLoader
- SRTLoader
- TelegramChatLoader
- TextLoader
- UnstructuredWordDocumentLoader
- WebBaseLoader
- AZLyricsLoader
- CollegeConfidentialLoader
- HNLoader
- IFixitLoader
- IMSDbLoader
- GitbookLoader
- ReadTheDocsLoader
- SlackDirectoryLoader
- NotionDirectoryLoader
- DirectoryLoader
- GitLoader
AirbyteJSONLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/airbyte_json"
CoNLLULoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/conll-u"
CSVLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv"
UnstructuredEmailLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/email"
EverNoteLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/evernote"
FacebookChatLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/facebook_chat"
GutenbergLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gutenberg"
BSHTMLLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html"
UnstructuredHTMLLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/html"
UnstructuredMarkdownLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/markdown"
PyPDFLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/pdf"
UnstructuredPowerPointLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_powerpoint"
SRTLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/subtitle"
TelegramChatLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/telegram"
TextLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/"
UnstructuredWordDocumentLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/microsoft_word"
WebBaseLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base"
AZLyricsLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/azlyrics"
CollegeConfidentialLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/college_confidential"
HNLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/hacker_news"
IFixitLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/ifixit"
IMSDbLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/imsdb"
GitbookLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/gitbook"
ReadTheDocsLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/readthedocs_documentation"
SlackDirectoryLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/slack"
NotionDirectoryLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/notion"
DirectoryLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/how_to/file_directory"
GitLoader:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/git"
embeddings:
- OpenAIEmbeddings
- HuggingFaceEmbeddings
- CohereEmbeddings
OpenAIEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/openai"
HuggingFaceEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/sentence_transformers"
CohereEmbeddings:
documentation: "https://python.langchain.com/docs/modules/data_connection/text_embedding/integrations/cohere"
llms:
- OpenAI
# - AzureOpenAI
# - AzureChatOpenAI
- ChatOpenAI
- LlamaCpp
- CTransformers
- Cohere
- Anthropic
- ChatAnthropic
- HuggingFaceHub
OpenAI:
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai"
ChatOpenAI:
documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai"
LlamaCpp:
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/llamacpp"
CTransformers:
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/ctransformers"
Cohere:
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/cohere"
Anthropic:
documentation: ""
ChatAnthropic:
documentation: "https://python.langchain.com/docs/modules/model_io/models/chat/integrations/anthropic"
HuggingFaceHub:
documentation: "https://python.langchain.com/docs/modules/model_io/models/llms/integrations/huggingface_hub"
memories:
- ConversationBufferMemory
- ConversationSummaryMemory
- ConversationKGMemory
ConversationBufferMemory:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/summary"
ConversationSummaryMemory:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/summary"
ConversationKGMemory:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/kg"
ConversationBufferWindowMemory:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/buffer_window"
VectorStoreRetrieverMemory:
documentation: "https://python.langchain.com/docs/modules/memory/how_to/vectorstore_retriever_memory"
prompts:
- PromptTemplate
- FewShotPromptTemplate
- ZeroShotPrompt
PromptTemplate:
documentation: "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/"
ZeroShotPrompt:
documentation: "https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent"
textsplitters:
- CharacterTextSplitter
- RecursiveCharacterTextSplitter
# - LatexTextSplitter
# - PythonCodeTextSplitter
CharacterTextSplitter:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter"
RecursiveCharacterTextSplitter:
documentation: "https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter"
toolkits:
- OpenAPIToolkit
- JsonToolkit
- VectorStoreInfo
- VectorStoreRouterToolkit
- VectorStoreToolkit
OpenAPIToolkit:
documentation: ""
JsonToolkit:
documentation: ""
VectorStoreInfo:
documentation: ""
VectorStoreRouterToolkit:
documentation: ""
VectorStoreToolkit:
documentation: ""
tools:
- Search
- PAL-MATH
- Calculator
- Serper Search
- Tool
- PythonFunctionTool
- PythonFunction
- JsonSpec
- News API
- TMDB API
- Podcast API
- QuerySQLDataBaseTool
- InfoSQLDatabaseTool
- ListSQLDatabaseTool
# - QueryCheckerTool
- BingSearchRun
- GoogleSearchRun
- GoogleSearchResults
- GoogleSerperRun
- JsonListKeysTool
- JsonGetValueTool
- PythonREPLTool
- PythonAstREPLTool
- RequestsGetTool
- RequestsPostTool
- RequestsPatchTool
- RequestsPutTool
- RequestsDeleteTool
- WikipediaQueryRun
- WolframAlphaQueryRun
Search:
documentation: ""
PAL-MATH:
documentation: ""
Calculator:
documentation: ""
Serper Search:
documentation: ""
Tool:
documentation: ""
PythonFunctionTool:
documentation: ""
PythonFunction:
documentation: ""
JsonSpec:
documentation: ""
News API:
documentation: ""
TMDB API:
documentation: ""
Podcast API:
documentation: ""
QuerySQLDataBaseTool:
documentation: ""
InfoSQLDatabaseTool:
documentation: ""
ListSQLDatabaseTool:
documentation: ""
BingSearchRun:
documentation: ""
GoogleSearchRun:
documentation: ""
GoogleSearchResults:
documentation: ""
GoogleSerperRun:
documentation: ""
JsonListKeysTool:
documentation: ""
JsonGetValueTool:
documentation: ""
PythonREPLTool:
documentation: ""
PythonAstREPLTool:
documentation: ""
RequestsGetTool:
documentation: ""
RequestsPostTool:
documentation: ""
RequestsPatchTool:
documentation: ""
RequestsPutTool:
documentation: ""
RequestsDeleteTool:
documentation: ""
WikipediaQueryRun:
documentation: ""
WolframAlphaQueryRun:
documentation: ""
utilities:
- BingSearchAPIWrapper
- GoogleSearchAPIWrapper
- GoogleSerperAPIWrapper
- SearxResults
- SearxSearchWrapper
- SerpAPIWrapper
- WikipediaAPIWrapper
- WolframAlphaAPIWrapper
# - ZapierNLAWrapper
- SQLDatabase
BingSearchAPIWrapper:
documentation: ""
GoogleSearchAPIWrapper:
documentation: ""
GoogleSerperAPIWrapper:
documentation: ""
SearxResults:
documentation: ""
SearxSearchWrapper:
documentation: ""
SerpAPIWrapper:
documentation: ""
WikipediaAPIWrapper:
documentation: ""
WolframAlphaAPIWrapper:
documentation: ""
vectorstores:
- Chroma
- Qdrant
- Weaviate
- FAISS
- Pinecone
- SupabaseVectorStore
- MongoDBAtlasVectorSearch
Chroma:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/chroma"
Qdrant:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/qdrant"
Weaviate:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/weaviate"
FAISS:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/faiss"
Pinecone:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/pinecone"
SupabaseVectorStore:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase"
MongoDBAtlasVectorSearch:
documentation: "https://python.langchain.com/docs/modules/data_connection/vectorstores/integrations/mongodb_atlas_vector_search"
wrappers:
- RequestsWrapper
# - ChatPromptTemplate
# - SystemMessagePromptTemplate
# - HumanMessagePromptTemplate
RequestsWrapper:
documentation: ""

View file

@ -6,6 +6,7 @@ from langchain.agents import (
Tool,
ZeroShotAgent,
initialize_agent,
AgentType,
)
from langchain.agents.agent_toolkits import (
SQLDatabaseToolkit,
@ -297,6 +298,9 @@ class InitializeAgent(CustomAgentExecutor):
agent: str,
memory: Optional[BaseChatMemory] = None,
):
# Find which value in the AgentType enum corresponds to the string
# passed in as agent
agent = AgentType(agent)
return initialize_agent(
tools=tools,
llm=llm,

View file

@ -8,6 +8,7 @@ from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
from langflow.utils.logger import logger
from langflow.settings import settings
# Assuming necessary imports for Field, Template, and FrontendNode classes
@ -15,12 +16,29 @@ from langflow.utils.logger import logger
class LangChainTypeCreator(BaseModel, ABC):
type_name: str
type_dict: Optional[Dict] = None
name_docs_dict: Optional[Dict[str, str]] = None
@property
def frontend_node_class(self) -> Type[FrontendNode]:
"""The class type of the FrontendNode created in frontend_node."""
return FrontendNode
@property
def docs_map(self) -> Dict[str, str]:
    """A dict with the name of the component as key and the documentation link as value.

    Built lazily from the settings section matching ``self.type_name``
    (e.g. ``settings.llms``) and cached in ``self.name_docs_dict``.
    Falls back to an empty dict when the section is missing.
    """
    if self.name_docs_dict is None:
        try:
            # Each settings section maps component name -> {"documentation": url, ...}
            type_settings = getattr(settings, self.type_name)
            self.name_docs_dict = {
                # .get() so an entry without a "documentation" key yields ""
                # instead of raising KeyError (only AttributeError was caught before).
                name: value_dict.get("documentation", "")
                for name, value_dict in type_settings.items()
            }
        except AttributeError as exc:
            logger.error(exc)
            self.name_docs_dict = {}
    return self.name_docs_dict
@property
@abstractmethod
def type_to_loader_dict(self) -> Dict:
@ -83,7 +101,7 @@ class LangChainTypeCreator(BaseModel, ABC):
signature.add_extra_fields()
signature.add_extra_base_classes()
signature.set_documentation(self.docs_map.get(name, ""))
return signature

View file

@ -1,11 +1,12 @@
import json
from typing import Any, Callable, Dict, Sequence
from typing import Any, Callable, Dict, Sequence, Type
from langchain.agents import ZeroShotAgent
from langchain.agents import agent as agent_module
from langchain.agents.agent import AgentExecutor
from langchain.agents.agent_toolkits.base import BaseToolkit
from langchain.agents.tools import BaseTool
from langflow.interface.initialize.vector_store import vecstore_initializer
from pydantic import ValidationError
@ -16,6 +17,11 @@ from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.utils import load_file_into_dict
from langflow.utils import validate
from langchain.chains.base import Chain
from langchain.vectorstores.base import VectorStore
from langchain.document_loaders.base import BaseLoader
from langchain.prompts.base import BasePromptTemplate
from langflow.chat.config import ChatConfig
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
@ -72,11 +78,22 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
return instantiate_utility(node_type, class_object, params)
elif base_type == "chains":
return instantiate_chains(node_type, class_object, params)
elif base_type == "llms":
return instantiate_llm(node_type, class_object, params)
else:
return class_object(**params)
def instantiate_chains(node_type, class_object, params):
def instantiate_llm(node_type, class_object, params: Dict):
    """Instantiate an LLM class, toggling chat streaming off for JinaChat.

    This is a workaround so JinaChat works until streaming is implemented:
    streaming stays enabled unless ``openai_api_base`` points at a Jina endpoint.
    """
    # ``openai_api_base`` may be absent OR present with value None; coalesce to ""
    # so the substring check never raises TypeError on None.
    ChatConfig.streaming = "jina" not in (params.get("openai_api_base") or "")
    return class_object(**params)
def instantiate_chains(node_type, class_object: Type[Chain], params: Dict):
if "retriever" in params and hasattr(params["retriever"], "as_retriever"):
params["retriever"] = params["retriever"].as_retriever()
if node_type in chain_creator.from_method_nodes:
@ -88,11 +105,11 @@ def instantiate_chains(node_type, class_object, params):
return class_object(**params)
def instantiate_agent(class_object, params):
def instantiate_agent(class_object: Type[agent_module.Agent], params: Dict):
return load_agent_executor(class_object, params)
def instantiate_prompt(node_type, class_object, params):
def instantiate_prompt(node_type, class_object: Type[BasePromptTemplate], params: Dict):
if node_type == "ZeroShotPrompt":
if "tools" not in params:
params["tools"] = []
@ -100,7 +117,7 @@ def instantiate_prompt(node_type, class_object, params):
return class_object(**params)
def instantiate_tool(node_type, class_object, params):
def instantiate_tool(node_type, class_object: Type[BaseTool], params: Dict):
if node_type == "JsonSpec":
params["dict_"] = load_file_into_dict(params.pop("path"))
return class_object(**params)
@ -118,7 +135,7 @@ def instantiate_tool(node_type, class_object, params):
return class_object(**params)
def instantiate_toolkit(node_type, class_object, params):
def instantiate_toolkit(node_type, class_object: Type[BaseToolkit], params: Dict):
loaded_toolkit = class_object(**params)
# Commenting this out for now to use toolkits as normal tools
# if toolkits_creator.has_create_function(node_type):
@ -128,7 +145,7 @@ def instantiate_toolkit(node_type, class_object, params):
return loaded_toolkit
def instantiate_embedding(class_object, params):
def instantiate_embedding(class_object, params: Dict):
params.pop("model", None)
params.pop("headers", None)
try:
@ -142,7 +159,7 @@ def instantiate_embedding(class_object, params):
return class_object(**params)
def instantiate_vectorstore(class_object, params):
def instantiate_vectorstore(class_object: Type[VectorStore], params: Dict):
search_kwargs = params.pop("search_kwargs", {})
if initializer := vecstore_initializer.get(class_object.__name__):
vecstore = initializer(class_object, params)
@ -158,7 +175,7 @@ def instantiate_vectorstore(class_object, params):
return vecstore
def instantiate_documentloader(class_object, params):
def instantiate_documentloader(class_object: Type[BaseLoader], params: Dict):
if "file_filter" in params:
# file_filter will be a string but we need a function
# that will be used to filter the files using file_filter
@ -187,19 +204,29 @@ def instantiate_documentloader(class_object, params):
return docs
def instantiate_textsplitter(class_object, params):
def instantiate_textsplitter(
    class_object,
    params: Dict,
):
    """Instantiate a text splitter and return the split documents.

    Pops ``documents`` from *params*; the remaining params configure the
    splitter. ``separator_type`` selects plain-text splitting (the default)
    or a Language value handed to ``class_object.from_language``.

    Raises:
        ValueError: if ``documents`` is missing (loader produced nothing).
    """
    try:
        documents = params.pop("documents")
    except KeyError as exc:
        raise ValueError(
            "The source you provided did not load correctly or was empty. "
            "Try changing the chunk_size of the Text Splitter."
        ) from exc

    # Pop separator_type so it is never forwarded to the splitter constructor,
    # which does not accept it as a keyword argument. Missing => plain text.
    separator_type = params.pop("separator_type", "Text")
    if separator_type == "Text":
        text_splitter = class_object(**params)
    else:
        # Language-aware splitting: the enum value replaces explicit separators.
        params["language"] = separator_type
        params.pop("separators", None)
        text_splitter = class_object.from_language(**params)
    return text_splitter.split_documents(documents)
def instantiate_utility(node_type, class_object, params):
def instantiate_utility(node_type, class_object, params: Dict):
    """Instantiate a utility wrapper; SQLDatabase is built from its URI."""
    if node_type != "SQLDatabase":
        return class_object(**params)
    # SQLDatabase uses an alternate constructor taking only the connection URI.
    uri = params.pop("uri")
    return class_object.from_uri(uri)

View file

@ -4,9 +4,12 @@ import os
from io import BytesIO
import re
import yaml
from langchain.base_language import BaseLanguageModel
from PIL.Image import Image
from langflow.utils.logger import logger
from langflow.chat.config import ChatConfig
def load_file_into_dict(file_path: str) -> dict:
@ -48,9 +51,9 @@ def try_setting_streaming_options(langchain_object, websocket):
if isinstance(llm, BaseLanguageModel):
if hasattr(llm, "streaming") and isinstance(llm.streaming, bool):
llm.streaming = True
llm.streaming = ChatConfig.streaming
elif hasattr(llm, "stream") and isinstance(llm.stream, bool):
llm.stream = True
llm.stream = ChatConfig.streaming
return langchain_object
@ -58,3 +61,22 @@ def try_setting_streaming_options(langchain_object, websocket):
def extract_input_variables_from_prompt(prompt: str) -> list[str]:
    """Return every ``{variable}`` placeholder name found in *prompt*, in order."""
    placeholder = re.compile(r"{(.*?)}")
    return placeholder.findall(prompt)
def setup_llm_caching():
    """Set up LangChain LLM caching using the cache class named in settings.

    Best-effort: any failure is logged as a warning and the app continues
    without an LLM cache.
    """
    try:
        import langchain
        from langflow.settings import settings
        from langflow.interface.importing.utils import import_class

        cache_class = import_class(f"langchain.cache.{settings.cache}")
        logger.debug(f"Setting up LLM caching with {cache_class.__name__}")
        langchain.llm_cache = cache_class()
        logger.info(f"LLM caching setup with {cache_class.__name__}")
    except ImportError as exc:
        # `settings` may be unbound here if its own import failed, so report
        # the exception itself instead of referencing settings.cache
        # (which previously could raise NameError inside this handler).
        logger.warning(f"Could not import LLM caching dependencies: {exc}")
    except Exception as exc:
        logger.warning(f"Could not setup LLM caching. Error: {exc}")

View file

@ -1,8 +1,13 @@
from pathlib import Path
from typing import Optional
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
from langflow.api import router
from langflow.database.base import create_db_and_tables
from langflow.interface.utils import setup_llm_caching
def create_app():
@ -28,6 +33,43 @@ def create_app():
app.include_router(router)
app.on_event("startup")(create_db_and_tables)
app.on_event("startup")(setup_llm_caching)
return app
def setup_static_files(app: FastAPI, static_files_dir: Path):
    """
    Serve the built frontend from ``static_files_dir`` at the app root.

    Args:
        app (FastAPI): FastAPI app to mount the static files on.
        static_files_dir (Path): Path to the static files directory.
    """
    app.mount(
        "/",
        StaticFiles(directory=static_files_dir, html=True),
        name="static",
    )

    @app.exception_handler(404)
    async def custom_404_handler(request, __):
        # SPA fallback: serve index.html for any unknown route so
        # client-side routing can resolve it.
        path = static_files_dir / "index.html"

        if not path.exists():
            raise RuntimeError(f"File at path {path} does not exist.")
        return FileResponse(path)
# app = create_app()
# setup_static_files(app, static_files_dir)
def setup_app(static_files_dir: Optional[Path]) -> FastAPI:
    """Setup the FastAPI app."""
    # Default to the bundled frontend build located next to this file.
    if not static_files_dir:
        static_files_dir = Path(__file__).parent / "frontend"
    application = create_app()
    setup_static_files(application, static_files_dir)
    return application

View file

@ -1,26 +1,26 @@
import os
from typing import List
import yaml
from pydantic import BaseSettings, root_validator
class Settings(BaseSettings):
chains: List[str] = []
agents: List[str] = []
prompts: List[str] = []
llms: List[str] = []
tools: List[str] = []
memories: List[str] = []
embeddings: List[str] = []
vectorstores: List[str] = []
documentloaders: List[str] = []
wrappers: List[str] = []
toolkits: List[str] = []
textsplitters: List[str] = []
utilities: List[str] = []
chains: dict = {}
agents: dict = {}
prompts: dict = {}
llms: dict = {}
tools: dict = {}
memories: dict = {}
embeddings: dict = {}
vectorstores: dict = {}
documentloaders: dict = {}
wrappers: dict = {}
toolkits: dict = {}
textsplitters: dict = {}
utilities: dict = {}
dev: bool = False
database_url: str = "sqlite:///./langflow.db"
cache: str = "InMemoryCache"
remove_api_keys: bool = False
class Config:
@ -37,16 +37,16 @@ class Settings(BaseSettings):
def update_from_yaml(self, file_path: str, dev: bool = False):
new_settings = load_settings_from_yaml(file_path)
self.chains = new_settings.chains or []
self.agents = new_settings.agents or []
self.prompts = new_settings.prompts or []
self.llms = new_settings.llms or []
self.tools = new_settings.tools or []
self.memories = new_settings.memories or []
self.wrappers = new_settings.wrappers or []
self.toolkits = new_settings.toolkits or []
self.textsplitters = new_settings.textsplitters or []
self.utilities = new_settings.utilities or []
self.chains = new_settings.chains or {}
self.agents = new_settings.agents or {}
self.prompts = new_settings.prompts or {}
self.llms = new_settings.llms or {}
self.tools = new_settings.tools or {}
self.memories = new_settings.memories or {}
self.wrappers = new_settings.wrappers or {}
self.toolkits = new_settings.toolkits or {}
self.textsplitters = new_settings.textsplitters or {}
self.utilities = new_settings.utilities or {}
self.dev = dev
def update_settings(self, **kwargs):

View file

@ -21,6 +21,7 @@ class TemplateFieldCreator(BaseModel, ABC):
name: str = ""
display_name: Optional[str] = None
advanced: bool = False
info: Optional[str] = ""
def to_dict(self):
result = self.dict()

View file

@ -15,14 +15,21 @@ class FrontendNode(BaseModel):
base_classes: List[str]
name: str = ""
display_name: str = ""
documentation: str = ""
def set_documentation(self, documentation: str) -> None:
"""Sets the documentation of the frontend node."""
self.documentation = documentation
def to_dict(self) -> dict:
"""Returns a dict representation of the frontend node."""
return {
self.name: {
"template": self.template.to_dict(self.format_field),
"description": self.description,
"base_classes": self.base_classes,
"display_name": self.display_name or self.name,
"documentation": self.documentation,
},
}

View file

@ -32,3 +32,13 @@ You are a good listener and you can talk about anything.
HUMAN_PROMPT = "{input}"
QA_CHAIN_TYPES = ["stuff", "map_reduce", "map_rerank", "refine"]
# This variable is used to tell the user
# that it can be changed to use other APIs
# like Prem and LocalAI
OPENAI_API_BASE_INFO = """
The base URL of the OpenAI API. Defaults to https://api.openai.com/v1.
You can change this to use other APIs like JinaChat, LocalAI and Prem.
"""

View file

@ -2,6 +2,7 @@ from typing import Optional
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.constants import OPENAI_API_BASE_INFO
class LLMFrontendNode(FrontendNode):
@ -15,6 +16,9 @@ class LLMFrontendNode(FrontendNode):
if "key" not in field.name.lower() and "token" not in field.name.lower():
field.password = False
if field.name == "openai_api_base":
field.info = OPENAI_API_BASE_INFO
@staticmethod
def format_azure_field(field: TemplateField):
if field.name == "model_name":

View file

@ -1,5 +1,6 @@
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langchain.text_splitter import Language
class TextSplittersFrontendNode(FrontendNode):
@ -17,6 +18,22 @@ class TextSplittersFrontendNode(FrontendNode):
name = "separator"
elif self.template.type_name == "RecursiveCharacterTextSplitter":
name = "separators"
# Add a field for type of separator
# which will have Text or any value from the
# Language enum
self.template.add_field(
TemplateField(
field_type="str",
required=True,
show=True,
name="separator_type",
advanced=False,
is_list=True,
options=[x.value for x in Language],
value="Text",
display_name="Separator Type",
)
)
self.template.add_field(
TemplateField(
field_type="str",

View file

@ -200,7 +200,7 @@ class VectorStoreFrontendNode(FrontendNode):
self.template.add_field(field)
def add_extra_base_classes(self) -> None:
self.base_classes.append("BaseRetriever")
self.base_classes.extend(("BaseRetriever", "VectorStoreRetriever"))
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:

View file

@ -25,6 +25,7 @@ import { nodeColors } from "../../../../utils";
import ShadTooltip from "../../../../components/ShadTooltipComponent";
import { PopUpContext } from "../../../../contexts/popUpContext";
import ToggleShadComponent from "../../../../components/toggleShadComponent";
import { Info } from "lucide-react";
export default function ParameterComponent({
left,
@ -36,9 +37,11 @@ export default function ParameterComponent({
type,
name = "",
required = false,
info = "",
}: ParameterComponentType) {
const ref = useRef(null);
const refHtml = useRef(null);
const infoHtml = useRef(null);
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
const { closePopUp } = useContext(PopUpContext);
@ -79,6 +82,18 @@ export default function ParameterComponent({
});
};
useEffect(() => {
infoHtml.current = (
<div className="h-full w-full break-words">
{info.split("\n").map((line, i) => (
<p key={i} className="block">
{line}
</p>
))}
</div>
);
}, [info]);
useEffect(() => {
const groupedObj = groupByFamily(myData, tooltipTitle);
@ -126,8 +141,25 @@ export default function ParameterComponent({
className="w-full flex flex-wrap justify-between items-center bg-muted dark:bg-gray-800 dark:text-white mt-1 px-5 py-2"
>
<>
<div className={"text-sm truncate w-full " + (left ? "" : "text-end")}>
<div
className={
"text-sm truncate w-full" +
(left ? "" : " text-end") +
(info !== "" ? " flex items-center" : "")
}
>
{title}
<div className="">
{info !== "" && (
<ShadTooltip
open={true}
delayDuration={500}
content={infoHtml.current}
>
<Info className="ml-2 relative bottom-0.5 w-3 h-3" />
</ShadTooltip>
)}
</div>
<span className="text-red-600">{required ? " *" : ""}</span>
</div>
{left &&

View file

@ -6,16 +6,7 @@ import {
} from "../../utils";
import ParameterComponent from "./components/parameterComponent";
import { typesContext } from "../../contexts/typesContext";
import {
useContext,
useState,
useEffect,
useRef,
ForwardRefExoticComponent,
ComponentType,
SVGProps,
ReactNode,
} from "react";
import { useContext, useState, useEffect, useRef } from "react";
import { NodeDataType } from "../../types/flow";
import { alertContext } from "../../contexts/alertContext";
import { PopUpContext } from "../../contexts/popUpContext";
@ -23,10 +14,9 @@ import NodeModal from "../../modals/NodeModal";
import Tooltip from "../../components/TooltipComponent";
import { NodeToolbar } from "reactflow";
import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarComponent";
import { FileText, Info } from "lucide-react";
import ShadTooltip from "../../components/ShadTooltipComponent";
import { useSSE } from "../../contexts/SSEContext";
import { ReactElement } from "react-markdown/lib/react-markdown";
export default function GenericNode({
data,
@ -46,6 +36,7 @@ export default function GenericNode({
const [validationStatus, setValidationStatus] = useState(null);
// State for outline color
const { sseData, isBuilding } = useSSE();
const refHtml = useRef(null);
// useEffect(() => {
// if (reactFlowInstance) {
@ -79,6 +70,22 @@ export default function GenericNode({
useEffect(() => {}, [closePopUp, data.node.template]);
useEffect(() => {
refHtml.current = (
<div className="flex">
<span>{`${data.node.display_name} Documentation`}</span>
<span
className="self-center"
style={{
color: nodeColors[types[data.type]] ?? nodeColors.unknown,
}}
>
<FileText className="h-4 w-4 ml-2" />
</span>
</div>
);
}, []);
return (
<>
<NodeToolbar>
@ -103,7 +110,7 @@ export default function GenericNode({
color: nodeColors[types[data.type]] ?? nodeColors.unknown,
}}
/>
<div className="ml-2 truncate">
<div className="ml-2 truncate flex">
<ShadTooltip
delayDuration={1500}
content={data.node.display_name}
@ -112,6 +119,29 @@ export default function GenericNode({
{data.node.display_name}
</div>
</ShadTooltip>
<div className="">
{data.node.documentation !== "" && (
<ShadTooltip
open={true}
delayDuration={1000}
content={refHtml.current}
>
<a
href={data.node.documentation}
target="_blank"
rel="noopener noreferrer"
>
<Info
style={{
color:
nodeColors[types[data.type]] ?? nodeColors.unknown,
}}
className="ml-2 self-center w-4 h-4"
/>
</a>
</ShadTooltip>
)}
</div>
</div>
</div>
<div className="flex gap-3">
@ -214,6 +244,7 @@ export default function GenericNode({
? toTitleCase(data.node.template[t].name)
: toTitleCase(t)
}
info={data.node.template[t].info}
name={t}
tooltipTitle={data.node.template[t].type}
required={data.node.template[t].required}

View file

@ -198,10 +198,13 @@ export function TabsProvider({ children }: { children: ReactNode }) {
edge.style = { stroke: "#555555" };
});
}
function updateDisplay_name(node: NodeType, template: APIClassType) {
node.data.node.display_name = template["display_name"]
? template["display_name"]
: node.data.type;
node.data.node.display_name = template["display_name"] || node.data.type;
}
// Copy the documentation link from the API template onto the node, so the
// info icon rendered by GenericNode can link to the component's docs.
function updateNodeDocumentation(node: NodeType, template: APIClassType) {
  node.data.node.documentation = template["documentation"];
}
function processFlowNodes(flow) {
@ -218,6 +221,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
updateNodeEdges(flow, node, template);
updateNodeDescription(node, template);
updateNodeTemplate(node, template);
updateNodeDocumentation(node, template);
}
});
}

View file

@ -1,9 +1,9 @@
import React, { forwardRef } from "react";
import { ReactComponent as MidjorneySVG } from "./Midjourney_Emblem.svg";
import { ReactComponent as MidjourneySVG } from "./Midjourney_Emblem.svg";
export const MidjorneyIcon = forwardRef<
export const MidjourneyIcon = forwardRef<
SVGSVGElement,
React.PropsWithChildren<{}>
>((props, ref) => {
return <MidjorneySVG ref={ref} {...props} />;
return <MidjourneySVG ref={ref} {...props} />;
});

View file

@ -12,6 +12,7 @@ export type APIClassType = {
description: string;
template: APITemplateType;
display_name: string;
documentation: string;
[key: string]: Array<string> | string | APITemplateType;
};
export type TemplateVariableType = {

View file

@ -1,10 +1,4 @@
import {
ComponentType,
ForwardRefExoticComponent,
ReactElement,
ReactNode,
SVGProps,
} from "react";
import { ReactElement, ReactNode } from "react";
import { NodeDataType } from "../flow/index";
import { typesContextType } from "../typesContext";
export type InputComponentType = {
@ -41,6 +35,7 @@ export type ParameterComponentType = {
name?: string;
tooltipTitle: string;
dataContext?: typesContextType;
info?: string;
};
export type InputListComponentType = {
value: string[];

View file

@ -33,7 +33,7 @@ import { HackerNewsIcon } from "./icons/hackerNews";
import { HugginFaceIcon } from "./icons/HuggingFace";
import { IFixIcon } from "./icons/IFixIt";
import { MetaIcon } from "./icons/Meta";
import { MidjorneyIcon } from "./icons/Midjorney";
import { MidjourneyIcon } from "./icons/Midjorney";
import { NotionIcon } from "./icons/Notion";
import { OpenAiIcon } from "./icons/OpenAi";
import { QDrantIcon } from "./icons/QDrant";
@ -45,7 +45,6 @@ import { twMerge } from "tailwind-merge";
import { ADJECTIVES, DESCRIPTIONS, NOUNS } from "./constants";
import { ComponentType, SVGProps } from "react";
import {
Boxes,
Cpu,
Fingerprint,
Gift,
@ -53,7 +52,6 @@ import {
HelpCircle,
Laptop2,
Layers,
LayoutDashboard,
Lightbulb,
Link,
MessageCircle,
@ -189,7 +187,7 @@ export const nodeIcons: {
HuggingFaceEmbeddings: HugginFaceIcon,
IFixitLoader: IFixIcon,
Meta: MetaIcon,
Midjorney: MidjorneyIcon,
Midjourney: MidjourneyIcon,
NotionDirectoryLoader: NotionIcon,
ChatOpenAI: OpenAiIcon,
OpenAI: OpenAiIcon,
@ -287,7 +285,10 @@ export const nodeIconsLucide: {
Meta: MetaIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
Midjorney: MidjorneyIcon as React.ForwardRefExoticComponent<
Midjorney: MidjourneyIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
MongoDBAtlasVectorSearch: MongoDBIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
NotionDirectoryLoader: NotionIcon as React.ForwardRefExoticComponent<
@ -302,6 +303,9 @@ export const nodeIconsLucide: {
OpenAIEmbeddings: OpenAiIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
Pinecone: PineconeIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
Qdrant: QDrantIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
@ -311,6 +315,9 @@ export const nodeIconsLucide: {
SlackDirectoryLoader: SlackIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
SupabaseVectorStore: SupabaseIcon as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,
agents: Rocket as React.ForwardRefExoticComponent<
ComponentType<SVGProps<SVGSVGElement>>
>,

View file

@ -88,85 +88,6 @@ def test_prompt_template(client: TestClient):
}
def test_few_shot_prompt_template(client: TestClient):
response = client.get("api/v1/all")
assert response.status_code == 200
json_response = response.json()
prompts = json_response["prompts"]
prompt = prompts["FewShotPromptTemplate"]
template = prompt["template"]
# Test other fields in the template similar to PromptTemplate
assert template["examples"] == {
"required": False,
"placeholder": "",
"show": True,
"multiline": True,
"password": False,
"name": "examples",
"type": "prompt",
"list": True,
"advanced": False,
}
assert template["example_selector"] == {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"password": False,
"name": "example_selector",
"type": "BaseExampleSelector",
"list": False,
"advanced": False,
}
assert template["example_prompt"] == {
"required": True,
"placeholder": "",
"show": True,
"multiline": False,
"password": False,
"name": "example_prompt",
"type": "PromptTemplate",
"list": False,
"advanced": False,
}
assert template["suffix"] == {
"required": True,
"placeholder": "",
"show": True,
"multiline": True,
"password": False,
"name": "suffix",
"type": "prompt",
"list": False,
"advanced": False,
}
assert template["example_separator"] == {
"required": False,
"placeholder": "",
"show": False,
"multiline": False,
"value": "\n\n",
"password": False,
"name": "example_separator",
"type": "str",
"list": False,
"advanced": False,
}
assert template["prefix"] == {
"required": False,
"placeholder": "",
"show": True,
"multiline": True,
"value": "",
"password": False,
"name": "prefix",
"type": "prompt",
"list": False,
"advanced": False,
}
def test_zero_shot_prompt(client: TestClient):
response = client.get("api/v1/all")
assert response.status_code == 200