Adding API improvements, HomePage and validation improvements (0.1.0)

Gabriel Luiz Freitas Almeida 2023-06-16 19:46:39 -03:00 committed by GitHub
commit 8cb9df212e
193 changed files with 19775 additions and 11475 deletions

6
.githooks/pre-commit Executable file → Normal file

@@ -1,2 +1,6 @@
#!/bin/sh
make format
added_files=$(git diff --name-only --cached --diff-filter=d)
make format
git add ${added_files}

1
.gitignore vendored

@@ -241,3 +241,4 @@ dmypy.json
# Poetry
.testenv/*
langflow.db

11
.vscode/launch.json vendored

@@ -1,7 +1,7 @@
{
"configurations": [
{
"name": "Python: FastAPI",
"name": "Debug Backend",
"type": "python",
"request": "launch",
"module": "uvicorn",
@@ -14,7 +14,7 @@
"debug"
],
"jinja": true,
"justMyCode": false
"justMyCode": true
},
{
"name": "Python: Remote Attach",
@@ -30,6 +30,13 @@
"remoteRoot": "."
}
]
},
{
"name": "Debug Frontend",
"type": "chrome",
"request": "launch",
"url": "http://localhost:3000/",
"webRoot": "${workspaceRoot}/src/frontend"
}
]
}

Makefile

@@ -43,8 +43,14 @@ install_backend:
poetry install
backend:
make install_backend
poetry run uvicorn langflow.main:app --port 7860 --reload --log-level debug
build_and_run:
echo 'Removing dist folder'
rm -rf dist
make build && poetry run pip install dist/*.tar.gz && poetry run langflow
build_frontend:
cd src/frontend && CI='' npm run build
cp -r src/frontend/build src/backend/langflow/frontend

6
package-lock.json generated

@@ -1,6 +0,0 @@
{
"name": "reactFlow",
"lockfileVersion": 3,
"requires": true,
"packages": {}
}

package.json

@@ -1,5 +0,0 @@
{
"dependencies": {
"vite-plugin-svgr": "^3.2.0"
}
}

481
poetry.lock generated

@@ -148,6 +148,27 @@ files = [
{file = "aiostream-0.4.5.tar.gz", hash = "sha256:3ecbf87085230fbcd9605c32ca20c4fb41af02c71d076eab246ea22e35947d88"},
]
[[package]]
name = "anthropic"
version = "0.2.10"
description = "Library for accessing the anthropic API"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "anthropic-0.2.10-py3-none-any.whl", hash = "sha256:a007496207fd186b0bcb9592b00ca130069d2a427f3d6f602a61dbbd1ac6316e"},
{file = "anthropic-0.2.10.tar.gz", hash = "sha256:e4da061a86d8ffb86072c0b0feaf219a3a4f7dfddd4224df9ba769e469498c19"},
]
[package.dependencies]
aiohttp = "*"
httpx = "*"
requests = "*"
tokenizers = "*"
[package.extras]
dev = ["black (>=22.3.0)", "pytest"]
[[package]]
name = "anyio"
version = "3.7.0"
@@ -614,77 +635,77 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "clickhouse-connect"
version = "0.6.2"
version = "0.6.3"
description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
category = "main"
optional = false
python-versions = "~=3.7"
files = [
{file = "clickhouse-connect-0.6.2.tar.gz", hash = "sha256:d103ea49c21f5783939a23cd6c6ac9bef15deeedd5495a9c4abbbf5cf6cf4794"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f5be919df434c90ebfc13b2fd90b6f71849cafd483b9cf7fb6eefb1f94005df"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d70c71bfdfb7a9a42d766231c13688fa692d61cb08dda2911219148fa7ad7b73"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48e296bdeab4c28a801b4d7e659c7fbc4cb30e9fb2e74159862c636da34c90e"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4479e2061d02ba97613d0f1c9854cc5ff83a89b1f938c48421315a904ba69ced"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:647bae3bbe6a686dd0707f08fe007c2eaf49a9be20cc2747308a7f77b1779406"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0066cb0d5673796e16068ef6be5171a944d5396e1c57139cb66350bfd4fd27f9"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7d7561aa49b2d0992f6556a4240acab53bcc4da4a8e4443307c5b78aec4b7068"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53e0292c59e4877cf3a23c8de6ccfab3d585355ae98e7006b08d40cdb943a172"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-win32.whl", hash = "sha256:a110e329c32e6afeade60377024d108a607737140e7dff47c2fdcc421b3b0d94"},
{file = "clickhouse_connect-0.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:962d1b777f3b025838c7762ba9daea1dd72e7befe7f6158531cf938ed1adfb1d"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5c20d6a22f7a32b4111361c65701229850a46b63b7ae08fa574dfb41d9d0d7f6"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:069a3d8e48bf91b5d728f3f3f519aef62e6598fbb72ed38b90cc30573793b3cb"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa8ee479cea3571e15843b7a22fa78146d828d6d4ddf369c27c5accd97b3582"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:229bccaee316947b5483040e80c09d56718e25584fb1ff280a40f69740db6b78"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5ab19e254bf7f353aacec0672382a626c7d535496ef405ae371a7675191bece"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e5886e8e659389cf136d443ee8c548c822ec7ca88d71634b26a6cacbbc99d66c"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11981d891f57d0047a498440a697cf4347e9533043ff71dfa5278dbc21824067"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:79d8443bfbcff0d7aae2c7e2aac09e04ebd20daea73168ee5c1464cfb9169198"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-win32.whl", hash = "sha256:ffb3e9556808d41228e86237320ef9727c1a2337987a453a36460b17b6f31057"},
{file = "clickhouse_connect-0.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:0ad73ccfb40ec8e6cb71f5c23488988b0e2b3a4357f227b992831f9bc478994b"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a1229035714346601aa8ca4f99e48686ad176e6307437db10d331d650b91251d"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9c8a14f6a52a8fbb8caada605b16499e8b3f6e9ae245502d30cae7c508a4c1"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98cfc11f58919c472c84967cd753789cb2c64888a1bc89ad6489885b9ae2a573"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7c26ab7417fd9573325b5f9ac4105a4681c9a15c41df6d32f507fc0847cbad4"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:970308f9b6691f6f86f7dd0c34ac25421437e1b008f3868a8763f2570e62ab50"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6b4ea2c068fc5e8785e047560a71605322651ee842e1268181e7afc2fc990a0d"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fb5ab90b3907cf32831bb835f9076b4e5c6aaba20ad883898de4894518dd2e8f"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-win32.whl", hash = "sha256:ea4467ef8c43fba32cf7ebeb15ec7a193c1b9e125154279939c583812fe5691f"},
{file = "clickhouse_connect-0.6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:f0c3451558feb127c2297a6a6c370e1aaee6dd09368dd4a92a6eb326ce84a7fb"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9955d03a5ceb3f1de91e41376c9c6f3323ab16dc5be42ae3a8854ba435f4ffe3"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14ef77c04cb3fd2045442675ea44215fab7426711a78558d1f89c85361d903d4"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:387ed0529eeba2ed708b25d806efcd22090f13264bd6a27790537fab818912d0"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11de16b5677159b9442323c84b2c062002185cab52b7780e011b7e234309293"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0cd0ee29cabcded71a3761e603ff7b5b3f9c60fbbe7ce1c790949d664bcb0c6"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:40e81d55c9a1fd26ab1b52d02dc176fd1b52496544b4e2ed1b94b71fdac6ddab"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5804ebf0d892dc7c4bbc12b78571703a73a281a5bacd19ce2709daae6a04b986"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:832d6cac1593c7645a51768c3939e007a1d5581a7056f922cc7b4e1d553f08d5"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-win32.whl", hash = "sha256:e69b085e9c6cdc94ab919237f14b8a2b8c8628748ebd1ff361fc6355e948d315"},
{file = "clickhouse_connect-0.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:82d462f7fa6552546ff73db9e4fe7d1dac04a822e1b3375927c63239fdebaf05"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03b59f4efedd41f4f69b56efaaf06793d4436c2259f44f11a5956a02e93ad65f"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aa1a2f7328a9ed2e92c46f5047410989d605d247404b99899c6af574b4959cf5"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83eadd349fb60862c0f8d4134770f40f3ac12e6f7b4f88b5d5029f19798adb4b"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c058c6e8cb5d9364463e615a6071efcc375cb01574c1e70041670b384cd2fa20"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:86caa4c4e2a53f9422403e408436471e94799d2ffa6913b058c440abcff77726"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f29a59878e12e844f9211bd3c2069ff62d23b572d0468b397a5224c566c24964"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5c70e9e77688b58a6de214421a37ab4c6130a89ebe703fc0d3f088642d1a55f1"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:05308b92ebf6fcb79603bedea8738db75525bedda2b0221d54b91ee29d9ecaff"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-win32.whl", hash = "sha256:5e0160625c8ced5012c13a3c3c21e403109d0091f37318b31b0051cb6dfafc73"},
{file = "clickhouse_connect-0.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:9102f118251547241ef576f12bb798012acfcb9e01617c252fb2e28e920a3f73"},
{file = "clickhouse_connect-0.6.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6a0e44f427a30c746148a52e0335d36880484c8f777a09d83912e3ccec7030d"},
{file = "clickhouse_connect-0.6.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46cb48031c4e297711a0c6b5f5a048ff6de649612f19a016e30804f34f408b56"},
{file = "clickhouse_connect-0.6.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0a0e1d37f18fabca90ac8b51844b6228c5d900e05992639c78d84263e0602dd"},
{file = "clickhouse_connect-0.6.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:347c216e73511410182bb2823977809a1a8b996fb4ba68fd131294d3221d04f1"},
{file = "clickhouse_connect-0.6.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:8023a580aa43d7eeff90ccb70f1951f08f7f23521389ea708862494178c01783"},
{file = "clickhouse_connect-0.6.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:031a6191d27ee15b8f163a800ce766d38643f17b312590ea685ff8094af7caa3"},
{file = "clickhouse_connect-0.6.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1d60c7641d7b49fcb85f2a4340a9b14ca77aa01908274cf12eb82b7fca4954"},
{file = "clickhouse_connect-0.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52438acab10e76db27c8dcbe37f1e0770c81aecb979423a026506701ed323d1b"},
{file = "clickhouse_connect-0.6.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd74434eff72cb0ca96023e706318969d9554367a7ccce9d90aee79ceacca7aa"},
{file = "clickhouse_connect-0.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4aa1335c720d301f29b36570c6beaa7153400a8415db88e514627a5883c55a90"},
{file = "clickhouse_connect-0.6.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0f860167bc422928c194c1f33afd6f8f7d191caaa42375aaa4b7b840b1d38647"},
{file = "clickhouse_connect-0.6.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfb4c342e241daee5223a7dc54517a4e511585c7dfa5f2655e5ec83917f3c5c7"},
{file = "clickhouse_connect-0.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d9539d47ba41cd368068a000e6cf68dbed45d5f77e7c9a3cef663e20387abcd"},
{file = "clickhouse_connect-0.6.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4deee26f96cf7617b2f65ceecbf7ee1480af13516ee9f3139d4c0d3d3e79b5b"},
{file = "clickhouse_connect-0.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9704c5e71b902e647f9fafa14aa781f22c7d456d0ab92c9698f82740d8c87b94"},
{file = "clickhouse-connect-0.6.3.tar.gz", hash = "sha256:fc5eef5c2a564bb4ad1d098521f9d4b49b044658c49ac4dc4d4984402d800b88"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2e1a2745070a1f1e71616bb04a66f288f8003b6f27612862f2290a576f1c293"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2ef65a1b238269be2cb0ea884850a835ddcfb9f6f7c98386b6b9e75b1341f2e"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2dd5afcdbe437bf2c2817a781060b081678812e88a646e9fe79bd7a4b78b8fa"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d9d7fb2bae412b3d00fd941d2ba83b8678772e10be91ba939f56f5c0fad8f6f"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e7e52014d36e8d11ffc6c6a337dfb8a1bafa6e47dcd761db785e7052e760d45"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0a5046430ec73c7ca606d7e6353a8294f07d6661bf5e849240394402765ccc6e"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a16053aaf9f7f69f85fc279268745457cbb08f373557f9bf87bf18a5fe3ebf9"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:919cdcc409d9cfd0e461f8162b635b81922b93b02cf050c22ec6fcd1a2a6419d"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-win32.whl", hash = "sha256:88c01a29a230737c2f3c7069c02df62cee1cfa2ebebe5be7d1a69bb70ff02209"},
{file = "clickhouse_connect-0.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:d96af7be37df2b8f27dbd9c6a6b3be0c63af7f5a461eb4efb16039bdaad80548"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8595776e55cab03ea7ea66d26519c40f0e1393e5b0fc97d4128dd3dbcd2479aa"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56edb9f2c33e2888306faba2401e777e4acc718689a6d271de4f2f0b64bc6855"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ec7161f9fea2b28e0b3746febdb53c584e950c504ed79b1660dc4a5cfbeb0a6"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2a3e1a1ba4e027519304c0706db6a573144083452c721baeb550604fd545277"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1bfbcca40da96a84faac586df89c3f72c06fb0d500be46f5069d95a4c649de4"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ac82b24037bf06231f19d6d6897a025c445ec97fac55cc98f32ee226e151215e"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7def902edd46ae34bda26a9bf9bc9b353bf6379d2dfefd33877823394b5d15c1"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d630b619cc60fa222068a659c0f0b94141a066ff0c4128d51876fd7d40e99156"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-win32.whl", hash = "sha256:e4d8dd8baa66293782130120345c5b6052cf72f7c950446c0e3baa39662f85f3"},
{file = "clickhouse_connect-0.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:058ac19f3395db37208c02e051bbfb8a233bb14edcb9d9199e06947e56cf9836"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8337b5db75ac257fb62be8ac1a1b6f2a0c718538e1a959cbec48c05e3c68a420"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5090ab4c6ce36d43acaeaba0498c8ac5ff1a1d29bb6ff8c96d3a1c2034927b03"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8abd9032df9da9e3e73851d3178505e50aa3d8dc4c42b7600ef0faf214ae77ad"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8be41fdf1165b06b09c83dc5f19d62cdf8f77a3420a510f5674c1a9405d9fd69"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5568607d300c6dda03e9e166a037a15c467e4db69ce1858d6ac54e5f09d1bb47"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:597466aba691b3fcb19555b0a8cff9acc0015f1412f3da5478763454a7813d58"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ee6a1f5cf86852e3a237ff0d7160ab18f9dd843a518f5ee068a58b1aa621032"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-win32.whl", hash = "sha256:aabaebfe51b28ffa051fb2e33f88e68ea0c4cfff83bbfb17e15db3bee69d94c4"},
{file = "clickhouse_connect-0.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:7a519cc779624c1fa89c25a54e336ebae0091e4abd8fffb6fa1913537279eaed"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:40ffef5ff89193155a701a18cf024bd9cb3a3646eb5e6afb711f5f9d128abf60"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:be346ba2390cc3255761044717d6bd5484740e259bbe9c3ff699101d6a56b027"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:721cc849f5869ef0519d12360d8afc6942bae7ece4c60733858e91a8baf8b094"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0f871a6606262debef26286fe1644e7a60df34f389559b5c0ae791e61dc520"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92c201875482c8a8629e3a9263f909b2a0dcba6b5b6ccd84532f8affd7fcea79"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5626443d81845a651af5384a260c50d387933a54434cb64e9dbfb49ecc563249"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8b021f968d8a139579673f67d7524c84f5a0a1b546b23ddfea9ee362e6fbeb42"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:680769adddca98003799841692e3dcaf0006d7dd2232dc5489c5cbe6e84cae09"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-win32.whl", hash = "sha256:e82ee89ba81a594406e79f1f35f04a38a4c8e384e3fc5b277d06249cac0ddf4a"},
{file = "clickhouse_connect-0.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:3b0a81f9e29b9e50d94542fe5599481f0f927d27269154ebad3734b8f9aa7182"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:daff3f78c3b244e725d8d3a6bd4e2c40b00f012a70b5da4b6c75987bbfb727cf"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0060fe34222bf8f8ac0f5c4edbd3af7f7fab36ac1303e8491986d009b4a7048"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a0fd3fdb1df76214db8aa17afd6bf570c4af45b83680ec6e11afd1c0fe78bc6"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6971c1775e906f616ca0462e2a807ea1b919c4bc185f4ad0136db7a61e965510"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e901de7eaeb36fdc3696ae254badf89e481738071b8743610de81c570368d2f"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:40f44f500a63e4833fe7c185bc07628c7b9d73d932b484d69c7faecc5db37a88"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f598132a70b083fc40d7cd1f37ea9582de4658767c4fdc693637b58c245479ae"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:024618b6a904e188ed1bce18ae6a964d7bb493628c60c748bca90ec60c63f287"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-win32.whl", hash = "sha256:33ce6884a3574321ff32cb3e30bf98f9c0753558b88fdc13439741207f77b4a7"},
{file = "clickhouse_connect-0.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:10de9eced5e7971e4d51e467e42b8151605e9d699674df721291e2af8f14d608"},
{file = "clickhouse_connect-0.6.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:02e3a05b002d1ca144264114d8898ccdf3331f1a8a01e10777e692661f91bbdd"},
{file = "clickhouse_connect-0.6.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c4e3ca74ae79415ea001ceaa8556a31693dd854ba785f1fb170bdf25d432f80"},
{file = "clickhouse_connect-0.6.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:585f4a2438af0a3d8bb1fdd4e9a72057e08eb521285ebd29ad4e7a6dd926cd1c"},
{file = "clickhouse_connect-0.6.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5361c3d4f45d72ed2acf58371ef8bdcc6b03934c52cbc9858c58d45b3d2b5b1d"},
{file = "clickhouse_connect-0.6.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:19c5803e6d1d4ce1c1e45b7e2577f65f3eae35d60346917780e963e89d395063"},
{file = "clickhouse_connect-0.6.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e3580076a14a999af8fd346a51a95e4fd82cea49493e206d84737a51cec549e"},
{file = "clickhouse_connect-0.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7e034e7036a58e4f4ef11ccf0ad34d1b695aa69748655fbc6a4d42a5492606f"},
{file = "clickhouse_connect-0.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bd3dd05b28a1fb7fb95f0cb770afc8fb77fc20f9056f372761ebdf2ef2e1e1"},
{file = "clickhouse_connect-0.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22268d580363f91ff980292289aa9735d6a09d27a33262a132c906e78a95a055"},
{file = "clickhouse_connect-0.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:22f37e67c7ba42b6e735421755cb0791018c19975818312ebdacedee9094ad8f"},
{file = "clickhouse_connect-0.6.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7bfd4e5ef45d796894c6e91a64194dcabb6c3cc783c98474e03a8b2be49c22f6"},
{file = "clickhouse_connect-0.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c188fde357f9b589ef5ae7a5744c80763c51a1934d44c55c52b91d13aa7f3529"},
{file = "clickhouse_connect-0.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50bed2c7ece8ce530d9de56c8008d2894d5eacdfb9f1d25f52bbf8622bf740f8"},
{file = "clickhouse_connect-0.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99334445e29b35a8b6d25b1476227fbb29beba1cf8201dc6696bc99b17f77972"},
{file = "clickhouse_connect-0.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:bed2b977c3fce5f841a2736bc92f1d395d1999e687a6ec3793a0d0c609b13c18"},
]
[package.dependencies]
@@ -703,19 +724,20 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
[[package]]
name = "cohere"
version = "4.9.0"
version = "4.11.2"
description = ""
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "cohere-4.9.0-py3-none-any.whl", hash = "sha256:d29affeb26e882518b0a28ee85aabb8bfbe65576228de04ec2a9aa375f582729"},
{file = "cohere-4.9.0.tar.gz", hash = "sha256:e1df3dc7e3e0e47652532c6bc87e8eb8c30688c7de1d7417e56cb45d2fbea1b6"},
{file = "cohere-4.11.2-py3-none-any.whl", hash = "sha256:c5032f4a2aafbcfdf1cacd5b49121c8cc4804fbd121d4a7ac0dfea499398ea28"},
{file = "cohere-4.11.2.tar.gz", hash = "sha256:4d3e663a306e6fcb87c41cded2195257ebc6992d361a70417f6616f045c4ec47"},
]
[package.dependencies]
aiohttp = ">=3.0,<4.0"
backoff = ">=2.0,<3.0"
importlib_metadata = ">=6.0,<7.0"
requests = ">=2.0,<3.0"
[[package]]
@@ -1235,18 +1257,18 @@ importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
[[package]]
name = "fastapi"
version = "0.95.2"
version = "0.96.1"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "fastapi-0.95.2-py3-none-any.whl", hash = "sha256:d374dbc4ef2ad9b803899bd3360d34c534adc574546e25314ab72c0c4411749f"},
{file = "fastapi-0.95.2.tar.gz", hash = "sha256:4d9d3e8c71c73f11874bcf5e33626258d143252e329a01002f767306c64fb982"},
{file = "fastapi-0.96.1-py3-none-any.whl", hash = "sha256:22d773ce95f14f04f8f37a0c8998fc163e67af83b65510d2879de6cbaaa10215"},
{file = "fastapi-0.96.1.tar.gz", hash = "sha256:5c1d243030e63089ccfc0aec69c2da6d619943917727e8e82ee502358d5119bf"},
]
[package.dependencies]
pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
starlette = ">=0.27.0,<0.28.0"
[package.extras]
@@ -1369,26 +1391,26 @@ files = [
[[package]]
name = "google-api-core"
version = "2.11.0"
version = "2.11.1"
description = "Google API client core library"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "google-api-core-2.11.0.tar.gz", hash = "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22"},
{file = "google_api_core-2.11.0-py3-none-any.whl", hash = "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e"},
{file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"},
{file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"},
]
[package.dependencies]
google-auth = ">=2.14.1,<3.0dev"
googleapis-common-protos = ">=1.56.2,<2.0dev"
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev"
requests = ">=2.18.0,<3.0.0dev"
google-auth = ">=2.14.1,<3.0.dev0"
googleapis-common-protos = ">=1.56.2,<2.0.dev0"
protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0"
requests = ">=2.18.0,<3.0.0.dev0"
[package.extras]
grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)", "grpcio-status (>=1.49.1,<2.0dev)"]
grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0dev)"]
grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"]
grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
[[package]]
name = "google-api-python-client"
@@ -2101,13 +2123,13 @@ test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pa
[[package]]
name = "jcloud"
version = "0.2.11"
version = "0.2.12"
description = "Simplify deploying and managing Jina projects on Jina Cloud"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "jcloud-0.2.11.tar.gz", hash = "sha256:c1216f67405b56695ac8da8b1b82209c4264d279cab92603a43d678994dc1a12"},
{file = "jcloud-0.2.12.tar.gz", hash = "sha256:b7bae0909e4f09267aaa681c59e86ae27d3812fb7b63ad018bfbbcfdf8c0ad1e"},
]
[package.dependencies]
@@ -2342,14 +2364,14 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pyt
[[package]]
name = "jupyter-core"
version = "5.3.0"
version = "5.3.1"
description = "Jupyter core package. A base package on which Jupyter projects rely."
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
{file = "jupyter_core-5.3.0-py3-none-any.whl", hash = "sha256:d4201af84559bc8c70cead287e1ab94aeef3c512848dde077b7684b54d67730d"},
{file = "jupyter_core-5.3.0.tar.gz", hash = "sha256:6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc"},
{file = "jupyter_core-5.3.1-py3-none-any.whl", hash = "sha256:ae9036db959a71ec1cac33081eeb040a79e681f08ab68b0883e9a676c7a90dce"},
{file = "jupyter_core-5.3.1.tar.gz", hash = "sha256:5ba5c7938a7f97a6b0481463f7ff0dbac7c15ba48cf46fa4035ca6e838aa1aba"},
]
[package.dependencies]
@@ -2363,20 +2385,21 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "langchain"
version = "0.0.186"
version = "0.0.202"
description = "Building applications with LLMs through composability"
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain-0.0.186-py3-none-any.whl", hash = "sha256:c53ac8943351602dbde84759d32d57fe2e6599279576935a004688e43ee8ffbf"},
{file = "langchain-0.0.186.tar.gz", hash = "sha256:36d6d3872727a6f7d6db1b05b13caac35fed19a0d395d2264ed82aae53cfddfd"},
{file = "langchain-0.0.202-py3-none-any.whl", hash = "sha256:63ef3dba8df4326579aea30e8a209d8cb6cd199b8bd91f4eaf470c3f79ed5a57"},
{file = "langchain-0.0.202.tar.gz", hash = "sha256:25f61952afe6c47e9eb4be8d6f23edf14a7a2dfa0ab74512f809a49bf34ef8e4"},
]
[package.dependencies]
aiohttp = ">=3.8.3,<4.0.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
dataclasses-json = ">=0.5.7,<0.6.0"
langchainplus-sdk = ">=0.0.9"
numexpr = ">=2.8.4,<3.0.0"
numpy = ">=1,<2"
openapi-schema-pydantic = ">=1.2,<2.0"
@@ -2387,12 +2410,12 @@ SQLAlchemy = ">=1.4,<3"
tenacity = ">=8.1.0,<9.0.0"
[package.extras]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.1.dev3,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "momento (>=1.5.0,<2.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "openai (>=0,<1)"]
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.2.6,<0.3.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.2,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=3,<4)", "deeplake (>=3.3.0,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=2.8.6,<3.0.0)", "elasticsearch (>=8,<9)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.1.dev3,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.1.2,<2.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.6.1,<0.7.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0a20230509004)", "openai (>=0,<1)"]
cohere = ["cohere (>=3,<4)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "chardet (>=5.1.0,<6.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "psychicapi (>=0.2,<0.3)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "zep-python (>=0.30,<0.31)"]
extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "chardet (>=5.1.0,<6.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "psychicapi (>=0.5,<0.6)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "zep-python (>=0.31)"]
llms = ["anthropic (>=0.2.6,<0.3.0)", "cohere (>=3,<4)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
qdrant = ["qdrant-client (>=1.1.2,<2.0.0)"]
@@ -2400,13 +2423,13 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-serve"
version = "0.0.44"
version = "0.0.45"
description = "Langchain Serve - serve your langchain apps on Jina AI Cloud."
category = "main"
optional = true
python-versions = "*"
files = [
{file = "langchain-serve-0.0.44.tar.gz", hash = "sha256:464070990915628d164f40a29272114c1df884c609cd187f80901fdf0e16b527"},
{file = "langchain-serve-0.0.45.tar.gz", hash = "sha256:d4cff0d8838dbe8debc2c5e3ee43d53245b7e7458c2dc23cf37a79be7327058b"},
]
[package.dependencies]
@@ -2419,12 +2442,27 @@ nest-asyncio = "*"
requests = "*"
textual = "*"
toml = "*"
typing_extensions = "4.5.0"
typing-inspect = "0.8.0"
[package.extras]
test = ["psutil", "pytest", "pytest-asyncio"]
[[package]]
name = "langchainplus-sdk"
version = "0.0.10"
description = "Client library to connect to the LangChainPlus LLM Tracing and Evaluation Platform."
category = "main"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchainplus_sdk-0.0.10-py3-none-any.whl", hash = "sha256:6ea4013a92a4c33a61d22deb49620577c592a79ee44038b2c751032a71cbc7b6"},
{file = "langchainplus_sdk-0.0.10.tar.gz", hash = "sha256:4f810b38df74a99d01e5723e653da02f05df3ee922971cccabc365d00c33dbf6"},
]
[package.dependencies]
pydantic = ">=1,<2"
requests = ">=2,<3"
tenacity = ">=8.1.0,<9.0.0"
[[package]]
name = "linkify-it-py"
version = "2.0.2"
@@ -2923,6 +2961,33 @@ files = [
{file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
]
[[package]]
name = "multiprocess"
version = "0.70.14"
description = "better multiprocessing and multithreading in python"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "multiprocess-0.70.14-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560a27540daef4ce8b24ed3cc2496a3c670df66c96d02461a4da67473685adf3"},
{file = "multiprocess-0.70.14-pp37-pypy37_pp73-manylinux_2_24_i686.whl", hash = "sha256:bfbbfa36f400b81d1978c940616bc77776424e5e34cb0c94974b178d727cfcd5"},
{file = "multiprocess-0.70.14-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:89fed99553a04ec4f9067031f83a886d7fdec5952005551a896a4b6a59575bb9"},
{file = "multiprocess-0.70.14-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:40a5e3685462079e5fdee7c6789e3ef270595e1755199f0d50685e72523e1d2a"},
{file = "multiprocess-0.70.14-pp38-pypy38_pp73-manylinux_2_24_i686.whl", hash = "sha256:44936b2978d3f2648727b3eaeab6d7fa0bedf072dc5207bf35a96d5ee7c004cf"},
{file = "multiprocess-0.70.14-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e628503187b5d494bf29ffc52d3e1e57bb770ce7ce05d67c4bbdb3a0c7d3b05f"},
{file = "multiprocess-0.70.14-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0d5da0fc84aacb0e4bd69c41b31edbf71b39fe2fb32a54eaedcaea241050855c"},
{file = "multiprocess-0.70.14-pp39-pypy39_pp73-manylinux_2_24_i686.whl", hash = "sha256:6a7b03a5b98e911a7785b9116805bd782815c5e2bd6c91c6a320f26fd3e7b7ad"},
{file = "multiprocess-0.70.14-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:cea5bdedd10aace3c660fedeac8b087136b4366d4ee49a30f1ebf7409bce00ae"},
{file = "multiprocess-0.70.14-py310-none-any.whl", hash = "sha256:7dc1f2f6a1d34894c8a9a013fbc807971e336e7cc3f3ff233e61b9dc679b3b5c"},
{file = "multiprocess-0.70.14-py37-none-any.whl", hash = "sha256:93a8208ca0926d05cdbb5b9250a604c401bed677579e96c14da3090beb798193"},
{file = "multiprocess-0.70.14-py38-none-any.whl", hash = "sha256:6725bc79666bbd29a73ca148a0fb5f4ea22eed4a8f22fce58296492a02d18a7b"},
{file = "multiprocess-0.70.14-py39-none-any.whl", hash = "sha256:63cee628b74a2c0631ef15da5534c8aedbc10c38910b9c8b18dcd327528d1ec7"},
{file = "multiprocess-0.70.14.tar.gz", hash = "sha256:3eddafc12f2260d27ae03fe6069b12570ab4764ab59a75e81624fac453fbf46a"},
]
[package.dependencies]
dill = ">=0.3.6"
[[package]]
name = "mypy"
version = "1.3.0"
@@ -3507,6 +3572,62 @@ files = [
{file = "opentelemetry_util_http-0.39b0.tar.gz", hash = "sha256:1a78e53e97c8f0b05216dbe4d93836ae5f5f94ba877003e56d065f089373f0ce"},
]
[[package]]
name = "orjson"
version = "3.9.1"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "orjson-3.9.1-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4434b7b786fdc394b95d029fb99949d7c2b05bbd4bf5cb5e3906be96ffeee3b"},
{file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09faf14f74ed47e773fa56833be118e04aa534956f661eb491522970b7478e3b"},
{file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:503eb86a8d53a187fe66aa80c69295a3ca35475804da89a9547e4fce5f803822"},
{file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20f2804b5a1dbd3609c086041bd243519224d47716efd7429db6c03ed28b7cc3"},
{file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fd828e0656615a711c4cc4da70f3cac142e66a6703ba876c20156a14e28e3fa"},
{file = "orjson-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec53d648176f873203b9c700a0abacab33ca1ab595066e9d616f98cdc56f4434"},
{file = "orjson-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e186ae76b0d97c505500664193ddf508c13c1e675d9b25f1f4414a7606100da6"},
{file = "orjson-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d4edee78503016f4df30aeede0d999b3cb11fb56f47e9db0e487bce0aaca9285"},
{file = "orjson-3.9.1-cp310-none-win_amd64.whl", hash = "sha256:a4cc5d21e68af982d9a2528ac61e604f092c60eed27aef3324969c68f182ec7e"},
{file = "orjson-3.9.1-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:761b6efd33c49de20dd73ce64cc59da62c0dab10aa6015f582680e0663cc792c"},
{file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31229f9d0b8dc2ef7ee7e4393f2e4433a28e16582d4b25afbfccc9d68dc768f8"},
{file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b7ab18d55ecb1de543d452f0a5f8094b52282b916aa4097ac11a4c79f317b86"},
{file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db774344c39041f4801c7dfe03483df9203cbd6c84e601a65908e5552228dd25"},
{file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae47ef8c0fe89c4677db7e9e1fb2093ca6e66c3acbee5442d84d74e727edad5e"},
{file = "orjson-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:103952c21575b9805803c98add2eaecd005580a1e746292ed2ec0d76dd3b9746"},
{file = "orjson-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2cb0121e6f2c9da3eddf049b99b95fef0adf8480ea7cb544ce858706cdf916eb"},
{file = "orjson-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:24d4ddaa2876e657c0fd32902b5c451fd2afc35159d66a58da7837357044b8c2"},
{file = "orjson-3.9.1-cp311-none-win_amd64.whl", hash = "sha256:0b53b5f72cf536dd8aa4fc4c95e7e09a7adb119f8ff8ee6cc60f735d7740ad6a"},
{file = "orjson-3.9.1-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d4b68d01a506242316a07f1d2f29fb0a8b36cee30a7c35076f1ef59dce0890c1"},
{file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9dd4abe6c6fd352f00f4246d85228f6a9847d0cc14f4d54ee553718c225388f"},
{file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e20bca5e13041e31ceba7a09bf142e6d63c8a7467f5a9c974f8c13377c75af2"},
{file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8ae0467d01eb1e4bcffef4486d964bfd1c2e608103e75f7074ed34be5df48cc"},
{file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06f6ab4697fab090517f295915318763a97a12ee8186054adf21c1e6f6abbd3d"},
{file = "orjson-3.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8515867713301fa065c58ec4c9053ba1a22c35113ab4acad555317b8fd802e50"},
{file = "orjson-3.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:393d0697d1dfa18d27d193e980c04fdfb672c87f7765b87952f550521e21b627"},
{file = "orjson-3.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d96747662d3666f79119e5d28c124e7d356c7dc195cd4b09faea4031c9079dc9"},
{file = "orjson-3.9.1-cp37-none-win_amd64.whl", hash = "sha256:6d173d3921dd58a068c88ec22baea7dbc87a137411501618b1292a9d6252318e"},
{file = "orjson-3.9.1-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d1c2b0b4246c992ce2529fc610a446b945f1429445ece1c1f826a234c829a918"},
{file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19f70ba1f441e1c4bb1a581f0baa092e8b3e3ce5b2aac2e1e090f0ac097966da"},
{file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:375d65f002e686212aac42680aed044872c45ee4bc656cf63d4a215137a6124a"},
{file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4751cee4a7b1daeacb90a7f5adf2170ccab893c3ab7c5cea58b45a13f89b30b3"},
{file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d9a2a4b2302d5ebc3695498ebc305c3568e5ad4f3501eb30a6405a32d8af22"},
{file = "orjson-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46b4facc32643b2689dfc292c0c463985dac4b6ab504799cf51fc3c6959ed668"},
{file = "orjson-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec7c8a0f1bf35da0d5fd14f8956f3b82a9a6918a3c6963d718dfd414d6d3b604"},
{file = "orjson-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3a40b0fbe06ccd4d6a99e523d20b47985655bcada8d1eba485b1b32a43e4904"},
{file = "orjson-3.9.1-cp38-none-win_amd64.whl", hash = "sha256:402f9d3edfec4560a98880224ec10eba4c5f7b4791e4bc0d4f4d8df5faf2a006"},
{file = "orjson-3.9.1-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:49c0d78dcd34626e2e934f1192d7c052b94e0ecadc5f386fd2bda6d2e03dadf5"},
{file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:125f63e56d38393daa0a1a6dc6fedefca16c538614b66ea5997c3bd3af35ef26"},
{file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08927970365d2e1f3ce4894f9ff928a7b865d53f26768f1bbdd85dd4fee3e966"},
{file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9a744e212d4780ecd67f4b6b128b2e727bee1df03e7059cddb2dfe1083e7dc4"},
{file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d1dbf36db7240c61eec98c8d21545d671bce70be0730deb2c0d772e06b71af3"},
{file = "orjson-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80a1e384626f76b66df615f7bb622a79a25c166d08c5d2151ffd41f24c4cc104"},
{file = "orjson-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:15d28872fb055bf17ffca913826e618af61b2f689d2b170f72ecae1a86f80d52"},
{file = "orjson-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1e4d905338f9ef32c67566929dfbfbb23cc80287af8a2c38930fb0eda3d40b76"},
{file = "orjson-3.9.1-cp39-none-win_amd64.whl", hash = "sha256:48a27da6c7306965846565cc385611d03382bbd84120008653aa2f6741e2105d"},
{file = "orjson-3.9.1.tar.gz", hash = "sha256:db373a25ec4a4fccf8186f9a72a1b3442837e40807a736a815ab42481e83b7d0"},
]
[[package]]
name = "overrides"
version = "7.3.1"
@@ -5026,83 +5147,111 @@ files = [
[[package]]
name = "sqlalchemy"
version = "2.0.16"
version = "1.4.41"
description = "Database Abstraction Library"
category = "main"
optional = false
python-versions = ">=3.7"
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "SQLAlchemy-2.0.16-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7641f6ed2682de84d77c4894cf2e43700f3cf7a729361d7f9cac98febf3d8614"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8d3cbdb2f07fb0e4b897dc1df39166735e194fb946f28f26f4c9f9801c8b24f7"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08a791c75d6154d46914d1e23bd81d9455f2950ec1de81f2723848c593d2c8b"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91eb8f89fcce8f709f8a4d65d265bc48a80264ee14c7c9e955f3222f19b4b39c"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fc1dae11bd5167f9eb53b3ccad24a79813004612141e76de21cf4c028dc30b34"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b2801f85c5c0293aa710f8aa5262c707a83c1c203962ae5a22b4d9095e71aa9d"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-win32.whl", hash = "sha256:c5e333b81fe10d14efebd4e9429b7bb865ed9463ca8bef07a7136dfa1fd4a37b"},
{file = "SQLAlchemy-2.0.16-cp310-cp310-win_amd64.whl", hash = "sha256:f387b496a4c9474d8580195bb2660264a3f295a04d3a9d00f4fa15e9e597427e"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7be04dbe3470fe8dd332fdb48c979887c381ef6c635eddf2dec43d2766111be4"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2938edc512dd1fa48653e14c1655ab46144d4450f0e6b33da7acd8ba77fbfd7"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a2856e12cf5f54301ddf043bcbf0552561d61555e1bcf348b63f42b8e1eec2"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d320fde566b864adbc19abb40ecb80f4e25d6f084639969bb972d5cca16858"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e85e315725807c127ad8ba3d628fdb861cf9ebfb0e10c39a97c01e257cdd71b"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:63ea36c08792a7a8a08958bc806ecff6b491386feeaf14607c3d9d2d9325e67f"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-win32.whl", hash = "sha256:bdaf89dd82f4a0e1b8b5ffc9cdc0c9551be6175f7eee5af6a838e92ba2e57100"},
{file = "SQLAlchemy-2.0.16-cp311-cp311-win_amd64.whl", hash = "sha256:5a934eff1a2882137be3384826f997db8441d43b61fda3094923e69fffe474be"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbcc51fdbc89fafe4f4fe66f59372a8be88ded04de34ef438ab04f980beb12d4"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff6496ad5e9dc8baeb93a151cc2f599d01e5f8928a2aaf0b09a06428fdbaf553"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d6ef848e5afcd1bda3e9a843751f845c0ca888b61e669237680e913d84ec206"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3ef876615ff4b53e2033022195830ec4941a6e21068611f8d77de60203b90a98"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8544c6e62eacb77d5106e2055ef10f2407fc0dbd547e879f8745b2032eefd2bc"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-win32.whl", hash = "sha256:2f3b6c31b915159b96b68372212fa77f69230b0a32acab40cf539d2823954f5a"},
{file = "SQLAlchemy-2.0.16-cp37-cp37m-win_amd64.whl", hash = "sha256:d0c96592f54edd571e00ba6b1ed5df8263328ca1da9e78088c0ebc93c2e6562c"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a2e9f50a906d0b81292576a9fb458f8cace904c81a67088f4a2ca9ff2856f55d"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dc97238fa44be86971270943a0c21c19ce18b8d1596919048e57912e8abc02cc"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0db6734cb5644c55d0262a813b764c6e2cda1e66e939a488b3d6298cdc7344c2"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:131f0c894c6572cb1bdcf97c92d999d3128c4ff1ca13061296057072f61afe13"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f662cf69484c59f8a3435902c40dfc34d86050bdb15e23d437074ce9f153306b"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b72f4e4def50414164a1d899f2ce4e782a029fad0ed5585981d1611e8ae29a74"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-win32.whl", hash = "sha256:0e4645b260cfe375a0603aa117f0a47680864cf37833129da870919e88b08d8f"},
{file = "SQLAlchemy-2.0.16-cp38-cp38-win_amd64.whl", hash = "sha256:f409f35a0330ab0cb18ece736b86d8b8233c64f4461fcb10993f67afc0ac7e5a"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e19546924f0cf2ec930d1faf318b7365e5827276410a513340f31a2b423e96a4"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce1fc3f64fd42d5f763d6b83651471f32920338a1ba107a3186211474861af57"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e2569dac4e3cb85365b91ab569d06a221e0e17e65ce59949d00c3958946282b"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61f2035dea56ff1a429077e481496f813378beb02b823d2e3e7eb05bc1a7a8ca"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:81d867c1be5abd49f7e547c108391f371a9d980ba7ec34666c50d683f782b754"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2de1477af7f48c633b8ecb88245aedd811dca88e88aee9e9d787b388abe74c44"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-win32.whl", hash = "sha256:5e8522b49e0e640287308b68f71cc338446bbe1c226c8f81743baa91b0246e92"},
{file = "SQLAlchemy-2.0.16-cp39-cp39-win_amd64.whl", hash = "sha256:43e69c8c1cea0188b7094e22fb93ae1a1890aac748628b7e925024a206f75368"},
{file = "SQLAlchemy-2.0.16-py3-none-any.whl", hash = "sha256:53081c6fce0d49bb36d05f12dc87e008c9b0df58a163b792c5fc4ac638925f98"},
{file = "SQLAlchemy-2.0.16.tar.gz", hash = "sha256:1e2caba78e7d1f5003e88817b7a1754d4e58f4a8f956dc423bf8e304c568ab09"},
{file = "SQLAlchemy-1.4.41-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:13e397a9371ecd25573a7b90bd037db604331cf403f5318038c46ee44908c44d"},
{file = "SQLAlchemy-1.4.41-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2d6495f84c4fd11584f34e62f9feec81bf373787b3942270487074e35cbe5330"},
{file = "SQLAlchemy-1.4.41-cp27-cp27m-win32.whl", hash = "sha256:e570cfc40a29d6ad46c9aeaddbdcee687880940a3a327f2c668dd0e4ef0a441d"},
{file = "SQLAlchemy-1.4.41-cp27-cp27m-win_amd64.whl", hash = "sha256:5facb7fd6fa8a7353bbe88b95695e555338fb038ad19ceb29c82d94f62775a05"},
{file = "SQLAlchemy-1.4.41-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f37fa70d95658763254941ddd30ecb23fc4ec0c5a788a7c21034fc2305dab7cc"},
{file = "SQLAlchemy-1.4.41-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:361f6b5e3f659e3c56ea3518cf85fbdae1b9e788ade0219a67eeaaea8a4e4d2a"},
{file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0990932f7cca97fece8017414f57fdd80db506a045869d7ddf2dda1d7cf69ecc"},
{file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd767cf5d7252b1c88fcfb58426a32d7bd14a7e4942497e15b68ff5d822b41ad"},
{file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5102fb9ee2c258a2218281adcb3e1918b793c51d6c2b4666ce38c35101bb940e"},
{file = "SQLAlchemy-1.4.41-cp310-cp310-win32.whl", hash = "sha256:2082a2d2fca363a3ce21cfa3d068c5a1ce4bf720cf6497fb3a9fc643a8ee4ddd"},
{file = "SQLAlchemy-1.4.41-cp310-cp310-win_amd64.whl", hash = "sha256:e4b12e3d88a8fffd0b4ca559f6d4957ed91bd4c0613a4e13846ab8729dc5c251"},
{file = "SQLAlchemy-1.4.41-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:90484a2b00baedad361402c257895b13faa3f01780f18f4a104a2f5c413e4536"},
{file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b67fc780cfe2b306180e56daaa411dd3186bf979d50a6a7c2a5b5036575cbdbb"},
{file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad2b727fc41c7f8757098903f85fafb4bf587ca6605f82d9bf5604bd9c7cded"},
{file = "SQLAlchemy-1.4.41-cp311-cp311-win32.whl", hash = "sha256:59bdc291165b6119fc6cdbc287c36f7f2859e6051dd923bdf47b4c55fd2f8bd0"},
{file = "SQLAlchemy-1.4.41-cp311-cp311-win_amd64.whl", hash = "sha256:d2e054aed4645f9b755db85bc69fc4ed2c9020c19c8027976f66576b906a74f1"},
{file = "SQLAlchemy-1.4.41-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:4ba7e122510bbc07258dc42be6ed45997efdf38129bde3e3f12649be70683546"},
{file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0dcf127bb99458a9d211e6e1f0f3edb96c874dd12f2503d4d8e4f1fd103790b"},
{file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e16c2be5cb19e2c08da7bd3a87fed2a0d4e90065ee553a940c4fc1a0fb1ab72b"},
{file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebeeec5c14533221eb30bad716bc1fd32f509196318fb9caa7002c4a364e4c"},
{file = "SQLAlchemy-1.4.41-cp36-cp36m-win32.whl", hash = "sha256:3e2ef592ac3693c65210f8b53d0edcf9f4405925adcfc031ff495e8d18169682"},
{file = "SQLAlchemy-1.4.41-cp36-cp36m-win_amd64.whl", hash = "sha256:eb30cf008850c0a26b72bd1b9be6730830165ce049d239cfdccd906f2685f892"},
{file = "SQLAlchemy-1.4.41-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c23d64a0b28fc78c96289ffbd0d9d1abd48d267269b27f2d34e430ea73ce4b26"},
{file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb8897367a21b578b26f5713833836f886817ee2ffba1177d446fa3f77e67c8"},
{file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14576238a5f89bcf504c5f0a388d0ca78df61fb42cb2af0efe239dc965d4f5c9"},
{file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639e1ae8d48b3c86ffe59c0daa9a02e2bfe17ca3d2b41611b30a0073937d4497"},
{file = "SQLAlchemy-1.4.41-cp37-cp37m-win32.whl", hash = "sha256:0005bd73026cd239fc1e8ccdf54db58b6193be9a02b3f0c5983808f84862c767"},
{file = "SQLAlchemy-1.4.41-cp37-cp37m-win_amd64.whl", hash = "sha256:5323252be2bd261e0aa3f33cb3a64c45d76829989fa3ce90652838397d84197d"},
{file = "SQLAlchemy-1.4.41-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:05f0de3a1dc3810a776275763764bb0015a02ae0f698a794646ebc5fb06fad33"},
{file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0002e829142b2af00b4eaa26c51728f3ea68235f232a2e72a9508a3116bd6ed0"},
{file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22ff16cedab5b16a0db79f1bc99e46a6ddececb60c396562e50aab58ddb2871c"},
{file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccfd238f766a5bb5ee5545a62dd03f316ac67966a6a658efb63eeff8158a4bbf"},
{file = "SQLAlchemy-1.4.41-cp38-cp38-win32.whl", hash = "sha256:58bb65b3274b0c8a02cea9f91d6f44d0da79abc993b33bdedbfec98c8440175a"},
{file = "SQLAlchemy-1.4.41-cp38-cp38-win_amd64.whl", hash = "sha256:ce8feaa52c1640de9541eeaaa8b5fb632d9d66249c947bb0d89dd01f87c7c288"},
{file = "SQLAlchemy-1.4.41-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:199a73c31ac8ea59937cc0bf3dfc04392e81afe2ec8a74f26f489d268867846c"},
{file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676d51c9f6f6226ae8f26dc83ec291c088fe7633269757d333978df78d931ab"},
{file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:036d8472356e1d5f096c5e0e1a7e0f9182140ada3602f8fff6b7329e9e7cfbcd"},
{file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2307495d9e0ea00d0c726be97a5b96615035854972cc538f6e7eaed23a35886c"},
{file = "SQLAlchemy-1.4.41-cp39-cp39-win32.whl", hash = "sha256:9c56e19780cd1344fcd362fd6265a15f48aa8d365996a37fab1495cae8fcd97d"},
{file = "SQLAlchemy-1.4.41-cp39-cp39-win_amd64.whl", hash = "sha256:f5fa526d027d804b1f85cdda1eb091f70bde6fb7d87892f6dd5a48925bc88898"},
{file = "SQLAlchemy-1.4.41.tar.gz", hash = "sha256:0292f70d1797e3c54e862e6f30ae474014648bc9c723e14a2fda730adb0a9791"},
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
typing-extensions = ">=4.2.0"
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""}
[package.extras]
aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx-oracle (>=7)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "sqlalchemy2-stubs"
version = "0.0.2a34"
description = "Typing Stubs for SQLAlchemy 1.4"
category = "main"
optional = false
python-versions = ">=3.6"
files = [
{file = "sqlalchemy2-stubs-0.0.2a34.tar.gz", hash = "sha256:2432137ab2fde1a608df4544f6712427b0b7ff25990cfbbc5a9d1db6c8c6f489"},
{file = "sqlalchemy2_stubs-0.0.2a34-py3-none-any.whl", hash = "sha256:a313220ac793404349899faf1272e821a62dbe1d3a029bd444faa8d3e966cd07"},
]
[package.dependencies]
typing-extensions = ">=3.7.4"
[[package]]
name = "sqlmodel"
version = "0.0.8"
description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness."
category = "main"
optional = false
python-versions = ">=3.6.1,<4.0.0"
files = [
{file = "sqlmodel-0.0.8-py3-none-any.whl", hash = "sha256:0fd805719e0c5d4f22be32eb3ffc856eca3f7f20e8c7aa3e117ad91684b518ee"},
{file = "sqlmodel-0.0.8.tar.gz", hash = "sha256:3371b4d1ad59d2ffd0c530582c2140b6c06b090b32af9b9c6412986d7b117036"},
]
[package.dependencies]
pydantic = ">=1.8.2,<2.0.0"
SQLAlchemy = ">=1.4.17,<=1.4.41"
sqlalchemy2-stubs = "*"
[[package]]
name = "stack-data"
version = "0.6.2"
@ -5556,6 +5705,18 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
[[package]]
name = "types-cachetools"
version = "5.3.0.5"
description = "Typing stubs for cachetools"
category = "main"
optional = false
python-versions = "*"
files = [
{file = "types-cachetools-5.3.0.5.tar.gz", hash = "sha256:67fa46d51a650896770aee0ba80f0e61dc4a7d1373198eec1bc0622263eaa256"},
{file = "types_cachetools-5.3.0.5-py3-none-any.whl", hash = "sha256:c0c5fa00199017d974c935bf043c467d5204e4f835141e489b48765b5ac1d960"},
]
[[package]]
name = "types-pillow"
version = "9.5.0.4"
@ -5633,14 +5794,14 @@ files = [
[[package]]
name = "typing-inspect"
version = "0.8.0"
version = "0.9.0"
description = "Runtime inspection utilities for typing module."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"},
{file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"},
{file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"},
{file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"},
]
[package.dependencies]
@ -5868,14 +6029,14 @@ files = [
[[package]]
name = "weaviate-client"
version = "3.20.0"
version = "3.20.1"
description = "A python native Weaviate client"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "weaviate-client-3.20.0.tar.gz", hash = "sha256:27c724596e9d3a4609dc7b7e9e362fc97dcfd9ba25f931c95a177bc60789be8e"},
{file = "weaviate_client-3.20.0-py3-none-any.whl", hash = "sha256:7559cb1500a5a22019a32cb55b162e204620179792cdfdd98b81e258c6ca7d96"},
{file = "weaviate-client-3.20.1.tar.gz", hash = "sha256:752912423f6334575c3feffcc98e5604e2d4b3bd3baddbff57d38db23bec3e9f"},
{file = "weaviate_client-3.20.1-py3-none-any.whl", hash = "sha256:175f1665d9f1e580dcfa48cfd4dab1e49925d1655b0b4720d0f2ee7709c956f6"},
]
[package.dependencies]
@ -6265,4 +6426,4 @@ deploy = ["langchain-serve"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
content-hash = "0b72cff85b2228a6f41d81ac2207cecf1d94c6adb914a3ef4fb19774d757f9f6"
content-hash = "c9284c917f124a3bd4bc4ea9f63932d4c91c6c558ee04c871aaab6b82b77bd2c"

View file

@ -1,11 +1,12 @@
[tool.poetry]
name = "langflow"
version = "0.0.89"
version = "0.1.0"
description = "A Python package with a built-in web application"
authors = ["Logspace <contact@logspace.ai>"]
maintainers = [
"Cristhian Zanforlin <cristhian.lousa@gmail.com>",
"Gabriel Almeida <gabriel@logspace.ai>",
"Ibis Prevedello <ibiscp@gmail.com>",
"Gustavo Schaedler <gustavopoa@gmail.com>",
"Lucas Eduoli <lucaseduoli@gmail.com>",
"Otávio Anovazzi <otavio2204@gmail.com>",
]
@ -22,17 +23,16 @@ langflow = "langflow.__main__:main"
[tool.poetry.dependencies]
python = ">=3.9,<3.12"
fastapi = "^0.95.0"
fastapi = "^0.96.0"
uvicorn = "^0.20.0"
beautifulsoup4 = "^4.11.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.7.0"
gunicorn = "^20.1.0"
langchain = "^0.0.186"
openai = "^0.27.7"
langchain = "^0.0.202"
openai = "^0.27.8"
types-pyyaml = "^6.0.12.8"
dill = "^0.3.6"
pandas = "^1.5.3"
chromadb = "^0.3.21"
huggingface-hub = "^0.13.3"
@ -57,7 +57,13 @@ jina = "3.15.2"
sentence-transformers = "^2.2.2"
ctransformers = "^0.2.2"
cohere = "^4.6.0"
sqlmodel = "^0.0.8"
faiss-cpu = "^1.7.4"
anthropic = "^0.2.9"
orjson = "^3.9.0"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
[tool.poetry.group.dev.dependencies]
@ -77,6 +83,15 @@ types-pillow = "^9.5.0.2"
[tool.poetry.extras]
deploy = ["langchain-serve"]
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-ra"
testpaths = ["tests", "integration"]
console_output_style = "progress"
filterwarnings = ["ignore::DeprecationWarning"]
log_cli = true
[tool.ruff]
line-length = 120

View file

@ -1,4 +1,12 @@
from importlib import metadata
from langflow.cache import cache_manager
from langflow.processing.process import load_flow_from_json
try:
__version__ = metadata.version(__package__)
except metadata.PackageNotFoundError:
# Case where package metadata is not available.
__version__ = ""
del metadata # optional, avoids polluting the results of dir(__package__)
__all__ = ["load_flow_from_json", "cache_manager"]

View file

@ -1,27 +1,46 @@
import multiprocessing
import sys
import time
from fastapi import FastAPI
import httpx
from multiprocess import Process, cpu_count # type: ignore
import platform
from pathlib import Path
from typing import Optional
import socket
from rich.panel import Panel
from rich import box
from rich import print as rprint
import typer
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from langflow.main import create_app
from langflow.settings import settings
from langflow.utils.logger import configure
from langflow.utils.logger import configure, logger
import webbrowser
from dotenv import load_dotenv
app = typer.Typer()
def get_number_of_workers(workers=None):
if workers == -1:
workers = (multiprocessing.cpu_count() * 2) + 1
workers = (cpu_count() * 2) + 1
return workers
def update_settings(config: str, dev: bool = False):
def update_settings(
config: str,
dev: bool = False,
database_url: Optional[str] = None,
remove_api_keys: bool = False,
):
"""Update the settings from a config file."""
if config:
settings.update_from_yaml(config, dev=dev)
if database_url:
settings.update_settings(database_url=database_url)
if remove_api_keys:
settings.update_settings(remove_api_keys=remove_api_keys)
def serve_on_jcloud():
@ -77,10 +96,28 @@ def serve(
timeout: int = typer.Option(60, help="Worker timeout in seconds."),
port: int = typer.Option(7860, help="Port to listen on."),
config: str = typer.Option("config.yaml", help="Path to the configuration file."),
log_level: str = typer.Option("info", help="Logging level."),
# .env file param
env_file: Path = typer.Option(
".env", help="Path to the .env file containing environment variables."
),
log_level: str = typer.Option("critical", help="Logging level."),
log_file: Path = typer.Option("logs/langflow.log", help="Path to the log file."),
jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"),
dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
database_url: str = typer.Option(
None,
help="Database URL to connect to. If not provided, a local SQLite database will be used.",
),
path: str = typer.Option(
None,
help="Path to the frontend directory containing build files. This is for development purposes only.",
),
open_browser: bool = typer.Option(
True, help="Open the browser after starting the server."
),
remove_api_keys: bool = typer.Option(
False, help="Remove API keys from the projects saved in the database."
),
):
"""
Run the Langflow server.
@ -89,17 +126,25 @@ def serve(
if jcloud:
return serve_on_jcloud()
load_dotenv(env_file)
configure(log_level=log_level, log_file=log_file)
update_settings(config, dev=dev)
app = create_app()
# get the directory of the current file
path = Path(__file__).parent
static_files_dir = path / "frontend"
app.mount(
"/",
StaticFiles(directory=static_files_dir, html=True),
name="static",
update_settings(
config, dev=dev, database_url=database_url, remove_api_keys=remove_api_keys
)
# get the directory of the current file
if not path:
frontend_path = Path(__file__).parent
static_files_dir = frontend_path / "frontend"
else:
static_files_dir = Path(path)
app = create_app()
setup_static_files(app, static_files_dir)
# check if port is being used
if is_port_in_use(port, host):
port = get_free_port(port)
options = {
"bind": f"{host}:{port}",
"workers": get_number_of_workers(workers),
@ -107,17 +152,128 @@ def serve(
"timeout": timeout,
}
if platform.system() in ["Darwin", "Windows"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires an env variable to be set to use gunicorn
import uvicorn
webapp_process = Process(
target=run_langflow, args=(host, port, log_level, options, app)
)
webapp_process.start()
status_code = 0
while status_code != 200:
try:
status_code = httpx.get(f"http://{host}:{port}/health").status_code
uvicorn.run(app, host=host, port=port, log_level=log_level)
else:
from langflow.server import LangflowApplication
except Exception:
time.sleep(1)
LangflowApplication(app, options).run()
print_banner(host, port)
if open_browser:
webbrowser.open(f"http://{host}:{port}")
def setup_static_files(app: FastAPI, static_files_dir: Path):
"""
Setup the static files directory.
Args:
app (FastAPI): FastAPI app.
        static_files_dir (Path): Path to the static files directory.
"""
app.mount(
"/",
StaticFiles(directory=static_files_dir, html=True),
name="static",
)
@app.exception_handler(404)
async def custom_404_handler(request, __):
path = static_files_dir / "index.html"
if not path.exists():
raise RuntimeError(f"File at path {path} does not exist.")
return FileResponse(path)
def is_port_in_use(port, host="localhost"):
"""
Check if a port is in use.
Args:
port (int): The port number to check.
host (str): The host to check the port on. Defaults to 'localhost'.
Returns:
bool: True if the port is in use, False otherwise.
"""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex((host, port)) == 0
def get_free_port(port):
"""
Given a used port, find a free port.
Args:
port (int): The port number to check.
Returns:
int: A free port number.
"""
while is_port_in_use(port):
port += 1
return port
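# --- Illustrative sketch (not part of this commit) ---------------------------
# A hedged example of how the two port helpers above compose: if the default
# Langflow port is taken, fall back to the next free one. The default of 7860
# mirrors the `port` CLI option defined earlier in this file; the function name
# is a placeholder for illustration only.
def _example_pick_port(default_port: int = 7860) -> int:
    if is_port_in_use(default_port):
        return get_free_port(default_port)
    return default_port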
def print_banner(host, port):
# console = Console()
word = "LangFlow"
colors = ["#3300cc"]
styled_word = ""
for i, char in enumerate(word):
color = colors[i % len(colors)]
styled_word += f"[{color}]{char}[/]"
# Title with emojis and gradient text
title = (
f"[bold]Welcome to :chains: {styled_word} [/bold]\n\n"
f"Access [link=http://{host}:{port}]http://{host}:{port}[/link]"
)
info_text = (
"Collaborate, and contribute at our "
"[bold][link=https://github.com/logspace-ai/langflow]GitHub Repo[/link][/bold] :rocket:"
)
# Create a panel with the title and the info text, and a border around it
panel = Panel(
f"{title}\n{info_text}", box=box.ROUNDED, border_style="blue", expand=False
)
# Print the banner with a separator line before and after
rprint(panel)
def run_langflow(host, port, log_level, options, app):
"""
Run Langflow server on localhost
"""
try:
if platform.system() in ["Darwin", "Windows"]:
# Run using uvicorn on MacOS and Windows
# Windows doesn't support gunicorn
# MacOS requires an env variable to be set to use gunicorn
import uvicorn
uvicorn.run(app, host=host, port=port, log_level=log_level)
else:
from langflow.server import LangflowApplication
LangflowApplication(app, options).run()
except KeyboardInterrupt:
pass
except Exception as e:
logger.error(e)
sys.exit(1)
def main():

View file

@ -0,0 +1,3 @@
from langflow.api.router import router
__all__ = ["router"]

View file

@ -1,26 +0,0 @@
from fastapi import (
APIRouter,
WebSocket,
WebSocketDisconnect,
WebSocketException,
status,
)
from langflow.api.chat_manager import ChatManager
from langflow.utils.logger import logger
router = APIRouter()
chat_manager = ChatManager()
@router.websocket("/chat/{client_id}")
async def websocket_endpoint(client_id: str, websocket: WebSocket):
"""Websocket endpoint for chat."""
try:
await chat_manager.handle_websocket(client_id, websocket)
except WebSocketException as exc:
logger.error(exc)
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))
except WebSocketDisconnect as exc:
logger.error(exc)
await websocket.close(code=status.WS_1000_NORMAL_CLOSURE, reason=str(exc))

View file

@ -1,47 +0,0 @@
import logging
from importlib.metadata import version
from fastapi import APIRouter, HTTPException
from langflow.api.schemas import (
ExportedFlow,
GraphData,
PredictRequest,
PredictResponse,
)
from langflow.interface.run import process_graph_cached
from langflow.interface.types import build_langchain_types_dict
# build router
router = APIRouter()
logger = logging.getLogger(__name__)
@router.get("/all")
def get_all():
return build_langchain_types_dict()
@router.post("/predict", response_model=PredictResponse)
async def get_load(predict_request: PredictRequest):
try:
exported_flow: ExportedFlow = predict_request.exported_flow
graph_data: GraphData = exported_flow.data
data = graph_data.dict()
response = process_graph_cached(data, predict_request.message)
return PredictResponse(result=response.get("result", ""))
except Exception as e:
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
# get endpoint to return version of langflow
@router.get("/version")
def get_version():
return {"version": version("langflow")}
@router.get("/health")
def get_health():
return {"status": "OK"}

View file

@ -0,0 +1,18 @@
# Router for base api
from fastapi import APIRouter
from langflow.api.v1 import (
chat_router,
endpoints_router,
validate_router,
flows_router,
flow_styles_router,
)
router = APIRouter(
prefix="/api/v1",
)
router.include_router(chat_router)
router.include_router(endpoints_router)
router.include_router(validate_router)
router.include_router(flows_router)
router.include_router(flow_styles_router)

View file

@ -0,0 +1,24 @@
API_WORDS = ["api", "key", "token"]
def has_api_terms(word: str):
return "api" in word and (
"key" in word or ("token" in word and "tokens" not in word)
)
def remove_api_keys(flow: dict):
"""Remove api keys from flow data."""
if flow.get("data") and flow["data"].get("nodes"):
for node in flow["data"]["nodes"]:
node_data = node.get("data").get("node")
template = node_data.get("template")
for value in template.values():
if (
isinstance(value, dict)
and has_api_terms(value["name"])
and value.get("password")
):
value["value"] = None
return flow
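# --- Illustrative sketch (not part of this commit) ---------------------------
# A minimal flow dict showing what remove_api_keys() is expected to do: any
# password-style field whose name looks like an API key has its value nulled.
# The node and key names below are placeholders.
if __name__ == "__main__":  # example usage only
    example_flow = {
        "data": {
            "nodes": [
                {
                    "data": {
                        "node": {
                            "template": {
                                "openai_api_key": {
                                    "name": "openai_api_key",
                                    "password": True,
                                    "value": "sk-placeholder",
                                }
                            }
                        }
                    }
                }
            ]
        }
    }
    cleaned = remove_api_keys(example_flow)
    template = cleaned["data"]["nodes"][0]["data"]["node"]["template"]
    assert template["openai_api_key"]["value"] is None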

View file

@ -0,0 +1,13 @@
from langflow.api.v1.endpoints import router as endpoints_router
from langflow.api.v1.validate import router as validate_router
from langflow.api.v1.chat import router as chat_router
from langflow.api.v1.flows import router as flows_router
from langflow.api.v1.flow_styles import router as flow_styles_router
__all__ = [
"chat_router",
"endpoints_router",
"validate_router",
"flows_router",
"flow_styles_router",
]

View file

@ -1,6 +1,6 @@
from pydantic import BaseModel, validator
from langflow.graph.utils import extract_input_variables_from_prompt
from langflow.interface.utils import extract_input_variables_from_prompt
class CacheResponse(BaseModel):

View file

@ -3,7 +3,7 @@ from typing import Any
from langchain.callbacks.base import AsyncCallbackHandler, BaseCallbackHandler
from langflow.api.schemas import ChatResponse
from langflow.api.v1.schemas import ChatResponse
# https://github.com/hwchase17/chat-langchain/blob/master/callback.py

View file

@ -0,0 +1,122 @@
import json
from fastapi import (
APIRouter,
HTTPException,
WebSocket,
WebSocketException,
status,
)
from fastapi.responses import StreamingResponse
from langflow.api.v1.schemas import BuiltResponse, InitResponse
from langflow.chat.manager import ChatManager
from langflow.graph.graph.base import Graph
from langflow.utils.logger import logger
from cachetools import LRUCache
router = APIRouter(tags=["Chat"])
chat_manager = ChatManager()
flow_data_store: LRUCache = LRUCache(maxsize=10)
@router.websocket("/chat/{client_id}")
async def chat(client_id: str, websocket: WebSocket):
"""Websocket endpoint for chat."""
try:
if client_id in chat_manager.in_memory_cache:
await chat_manager.handle_websocket(client_id, websocket)
else:
message = "Please, build the flow before sending messages"
await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason=message)
except WebSocketException as exc:
logger.error(exc)
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))
@router.post("/build/init", response_model=InitResponse, status_code=201)
async def init_build(graph_data: dict):
"""Initialize the build by storing graph data and returning a unique session ID."""
try:
flow_id = graph_data.get("id")
if flow_id is None:
raise ValueError("No ID provided")
flow_data_store[flow_id] = graph_data
return InitResponse(flowId=flow_id)
except Exception as exc:
logger.error(exc)
        raise HTTPException(status_code=500, detail=str(exc)) from exc
@router.get("/build/{flow_id}/status", response_model=BuiltResponse)
async def build_status(flow_id: str):
"""Check the flow_id is in the flow_data_store."""
try:
built = flow_id in flow_data_store and not isinstance(
flow_data_store[flow_id], dict
)
return BuiltResponse(
built=built,
)
except Exception as exc:
logger.error(exc)
        raise HTTPException(status_code=500, detail=str(exc)) from exc
@router.get("/build/stream/{flow_id}", response_class=StreamingResponse)
async def stream_build(flow_id: str):
"""Stream the build process based on stored flow data."""
async def event_stream(flow_id):
final_response = json.dumps({"end_of_stream": True})
try:
if flow_id not in flow_data_store:
error_message = "Invalid session ID"
yield f"data: {json.dumps({'error': error_message})}\n\n"
return
graph_data = flow_data_store[flow_id].get("data")
if not graph_data:
error_message = "No data provided"
yield f"data: {json.dumps({'error': error_message})}\n\n"
return
logger.debug("Building langchain object")
graph = Graph.from_payload(graph_data)
for node in graph.generator_build():
try:
node.build()
params = node._built_object_repr()
valid = True
logger.debug(
f"Building node {params[:50]}{'...' if len(params) > 50 else ''}"
)
except Exception as exc:
params = str(exc)
valid = False
response = json.dumps(
{
"valid": valid,
"params": params,
"id": node.id,
}
)
yield f"data: {response}\n\n"
chat_manager.set_cache(flow_id, graph.build())
except Exception as exc:
logger.error("Error while building the flow: %s", exc)
yield f"error: {json.dumps({'error': str(exc)})}\n\n"
finally:
yield f"data: {final_response}\n\n"
try:
return StreamingResponse(event_stream(flow_id), media_type="text/event-stream")
except Exception as exc:
logger.error(exc)
raise HTTPException(status_code=500, detail=str(exc))
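# --- Illustrative client-side sketch (not part of this commit) ---------------
# A hedged example of how a client could drive the /build/init -> /build/stream
# contract above and read the server-sent events. The base URL and flow JSON
# are placeholders; only the response fields defined in this file are assumed.
if __name__ == "__main__":  # example usage only
    import httpx

    base_url = "http://localhost:7860/api/v1"
    flow = {"id": "example-flow-id", "data": {"nodes": [], "edges": []}}

    flow_id = httpx.post(f"{base_url}/build/init", json=flow).json()["flowId"]
    with httpx.stream("GET", f"{base_url}/build/stream/{flow_id}") as response:
        for line in response.iter_lines():
            if not line.startswith("data: "):
                continue
            event = json.loads(line[len("data: "):])
            if event.get("end_of_stream"):
                break
            print(event.get("id"), "valid" if event.get("valid") else event.get("params"))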

View file

@ -0,0 +1,64 @@
from langflow.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.utils.logger import logger
from fastapi import APIRouter, Depends, HTTPException
from langflow.api.v1.schemas import (
PredictRequest,
PredictResponse,
)
from langflow.interface.types import build_langchain_types_dict
from langflow.database.base import get_session
from sqlmodel import Session
# build router
router = APIRouter(tags=["Base"])
@router.get("/all")
def get_all():
return build_langchain_types_dict()
@router.post("/predict/{flow_id}", response_model=PredictResponse)
async def predict_flow(
predict_request: PredictRequest,
flow_id: str,
session: Session = Depends(get_session),
):
"""
    Endpoint to process a message using the flow identified by the flow_id path parameter.
"""
try:
flow = session.get(Flow, flow_id)
if flow is None:
raise ValueError(f"Flow {flow_id} not found")
if flow.data is None:
raise ValueError(f"Flow {flow_id} has no data")
graph_data = flow.data
if predict_request.tweaks:
try:
graph_data = process_tweaks(graph_data, predict_request.tweaks)
except Exception as exc:
logger.error(f"Error processing tweaks: {exc}")
response = process_graph_cached(graph_data, predict_request.message)
return PredictResponse(
result=response.get("result", ""),
intermediate_steps=response.get("thought", ""),
)
except Exception as e:
# Log stack trace
logger.exception(e)
raise HTTPException(status_code=500, detail=str(e)) from e
# get endpoint to return version of langflow
@router.get("/version")
def get_version():
from langflow import __version__
return {"version": __version__}

View file

@ -0,0 +1,83 @@
from uuid import UUID
from langflow.database.models.flow_style import (
FlowStyle,
FlowStyleCreate,
FlowStyleRead,
FlowStyleUpdate,
)
from langflow.database.base import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
# build router
router = APIRouter(prefix="/flow_styles", tags=["FlowStyles"])
# FlowStyleCreate:
# class FlowStyleBase(SQLModel):
# color: str = Field(index=True)
# emoji: str = Field(index=False)
# flow_id: UUID = Field(default=None, foreign_key="flow.id")
@router.post("/", response_model=FlowStyleRead)
def create_flow_style(
*, session: Session = Depends(get_session), flow_style: FlowStyleCreate
):
"""Create a new flow_style."""
db_flow_style = FlowStyle.from_orm(flow_style)
session.add(db_flow_style)
session.commit()
session.refresh(db_flow_style)
return db_flow_style
@router.get("/", response_model=list[FlowStyleRead])
def read_flow_styles(*, session: Session = Depends(get_session)):
"""Read all flows."""
try:
flows = session.exec(select(FlowStyle)).all()
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
return flows
@router.get("/{flow_styles_id}", response_model=FlowStyleRead)
def read_flow_style(*, session: Session = Depends(get_session), flow_styles_id: UUID):
"""Read a flow_style."""
if flow_style := session.get(FlowStyle, flow_styles_id):
return flow_style
else:
raise HTTPException(status_code=404, detail="FlowStyle not found")
@router.patch("/{flow_style_id}", response_model=FlowStyleRead)
def update_flow_style(
*,
session: Session = Depends(get_session),
flow_style_id: UUID,
flow_style: FlowStyleUpdate,
):
"""Update a flow_style."""
db_flow_style = session.get(FlowStyle, flow_style_id)
if not db_flow_style:
raise HTTPException(status_code=404, detail="FlowStyle not found")
flow_data = flow_style.dict(exclude_unset=True)
for key, value in flow_data.items():
if hasattr(db_flow_style, key) and value is not None:
setattr(db_flow_style, key, value)
session.add(db_flow_style)
session.commit()
session.refresh(db_flow_style)
return db_flow_style
@router.delete("/{flow_id}")
def delete_flow_style(*, session: Session = Depends(get_session), flow_id: UUID):
"""Delete a flow_style."""
flow_style = session.get(FlowStyle, flow_id)
if not flow_style:
raise HTTPException(status_code=404, detail="FlowStyle not found")
session.delete(flow_style)
session.commit()
return {"message": "FlowStyle deleted successfully"}

View file

@ -0,0 +1,120 @@
from typing import List
from uuid import UUID
from langflow.settings import settings
from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.database.models.flow import (
Flow,
FlowCreate,
FlowRead,
FlowReadWithStyle,
FlowUpdate,
)
from langflow.database.base import get_session
from sqlmodel import Session, select
from fastapi import APIRouter, Depends, HTTPException
from fastapi.encoders import jsonable_encoder
from fastapi import File, UploadFile
import json
# build router
router = APIRouter(prefix="/flows", tags=["Flows"])
@router.post("/", response_model=FlowRead, status_code=201)
def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
"""Create a new flow."""
db_flow = Flow.from_orm(flow)
session.add(db_flow)
session.commit()
session.refresh(db_flow)
return db_flow
@router.get("/", response_model=list[FlowReadWithStyle], status_code=200)
def read_flows(*, session: Session = Depends(get_session)):
"""Read all flows."""
try:
flows = session.exec(select(Flow)).all()
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
return [jsonable_encoder(flow) for flow in flows]
@router.get("/{flow_id}", response_model=FlowReadWithStyle, status_code=200)
def read_flow(*, session: Session = Depends(get_session), flow_id: UUID):
"""Read a flow."""
if flow := session.get(Flow, flow_id):
return flow
else:
raise HTTPException(status_code=404, detail="Flow not found")
@router.patch("/{flow_id}", response_model=FlowRead, status_code=200)
def update_flow(
*, session: Session = Depends(get_session), flow_id: UUID, flow: FlowUpdate
):
"""Update a flow."""
db_flow = session.get(Flow, flow_id)
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.dict(exclude_unset=True)
if not settings.remove_api_keys:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():
setattr(db_flow, key, value)
session.add(db_flow)
session.commit()
session.refresh(db_flow)
return db_flow
@router.delete("/{flow_id}", status_code=200)
def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
"""Delete a flow."""
flow = session.get(Flow, flow_id)
if not flow:
raise HTTPException(status_code=404, detail="Flow not found")
session.delete(flow)
session.commit()
return {"message": "Flow deleted successfully"}
# Define a new model to handle multiple flows
@router.post("/batch/", response_model=List[FlowRead], status_code=201)
def create_flows(*, session: Session = Depends(get_session), flow_list: FlowListCreate):
"""Create multiple new flows."""
db_flows = []
for flow in flow_list.flows:
db_flow = Flow.from_orm(flow)
session.add(db_flow)
db_flows.append(db_flow)
session.commit()
for db_flow in db_flows:
session.refresh(db_flow)
return db_flows
@router.post("/upload/", response_model=List[FlowRead], status_code=201)
async def upload_file(
*, session: Session = Depends(get_session), file: UploadFile = File(...)
):
"""Upload flows from a file."""
contents = await file.read()
data = json.loads(contents)
if "flows" in data:
flow_list = FlowListCreate(**data)
else:
flow_list = FlowListCreate(flows=[FlowCreate(**flow) for flow in data])
return create_flows(session=session, flow_list=flow_list)
@router.get("/download/", response_model=FlowListRead, status_code=200)
async def download_file(*, session: Session = Depends(get_session)):
"""Download all flows as a file."""
flows = read_flows(session=session)
return FlowListRead(flows=flows)
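# --- Illustrative client-side sketch (not part of this commit) ---------------
# A hedged example of uploading a previously downloaded flows file to the
# /flows/upload/ endpoint defined above. The file path and URL are placeholders.
if __name__ == "__main__":  # example usage only
    import httpx

    with open("flows.json", "rb") as f:
        resp = httpx.post(
            "http://localhost:7860/api/v1/flows/upload/",
            files={"file": ("flows.json", f, "application/json")},
        )
    resp.raise_for_status()
    print([flow["name"] for flow in resp.json()])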

View file

@ -1,6 +1,6 @@
from typing import Any, Dict, List, Union
from pydantic import BaseModel, validator
from typing import Any, Dict, List, Optional, Union
from langflow.database.models.flow import FlowCreate, FlowRead
from pydantic import BaseModel, Field, validator
class GraphData(BaseModel):
@ -23,13 +23,30 @@ class PredictRequest(BaseModel):
"""Predict request schema."""
message: str
exported_flow: ExportedFlow
tweaks: Optional[Dict[str, Dict[str, str]]] = Field(default_factory=dict)
class Config:
schema_extra = {
"example": {
"message": "Hello, how are you?",
"tweaks": {
"dndnode_986363f0-4677-4035-9f38-74b94af5dd78": {
"name": "A tool name",
"description": "A tool description",
},
"dndnode_986363f0-4677-4035-9f38-74b94af57378": {
"template": "A {template}",
},
},
}
}
class PredictResponse(BaseModel):
"""Predict response schema."""
result: str
intermediate_steps: str = ""
class ChatMessage(BaseModel):
@ -68,3 +85,19 @@ class FileResponse(ChatMessage):
if v not in ["image", "csv"]:
raise ValueError("data_type must be image or csv")
return v
class FlowListCreate(BaseModel):
flows: List[FlowCreate]
class FlowListRead(BaseModel):
flows: List[FlowRead]
class InitResponse(BaseModel):
flowId: str
class BuiltResponse(BaseModel):
built: bool

View file

@ -2,20 +2,20 @@ import json
from fastapi import APIRouter, HTTPException
from langflow.api.base import (
from langflow.api.v1.base import (
Code,
CodeValidationResponse,
Prompt,
PromptValidationResponse,
validate_prompt,
)
from langflow.graph.nodes import VectorStoreNode
from langflow.interface.run import build_graph
from langflow.graph.vertex.types import VectorStoreVertex
from langflow.graph import Graph
from langflow.utils.logger import logger
from langflow.utils.validate import validate_code
# build router
router = APIRouter(prefix="/validate", tags=["validate"])
router = APIRouter(prefix="/validate", tags=["Validate"])
@router.post("/code", status_code=200, response_model=CodeValidationResponse)
@ -44,12 +44,12 @@ def post_validate_prompt(prompt: Prompt):
def post_validate_node(node_id: str, data: dict):
try:
# build graph
graph = build_graph(data)
graph = Graph.from_payload(data)
# validate node
node = graph.get_node(node_id)
if node is None:
raise ValueError(f"Node {node_id} not found")
if not isinstance(node, VectorStoreNode):
if not isinstance(node, VectorStoreVertex):
node.build()
return json.dumps({"valid": True, "params": str(node._built_object_repr())})
except Exception as e:

View file

@ -1 +1,7 @@
from langflow.cache.manager import cache_manager # noqa
from langflow.cache.manager import cache_manager
from langflow.cache.flow import InMemoryCache
__all__ = [
"cache_manager",
"InMemoryCache",
]

View file

@ -1,154 +1,87 @@
import base64
import contextlib
import functools
import hashlib
import json
import os
import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import Any, Dict
import dill # type: ignore
CACHE: Dict[str, Any] = {}
import abc
def create_cache_folder(func):
def wrapper(*args, **kwargs):
# Get the destination folder
cache_path = Path(tempfile.gettempdir()) / PREFIX
# Create the destination folder if it doesn't exist
os.makedirs(cache_path, exist_ok=True)
return func(*args, **kwargs)
return wrapper
def memoize_dict(maxsize=128):
cache = OrderedDict()
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
hashed = compute_dict_hash(args[0])
key = (func.__name__, hashed, frozenset(kwargs.items()))
if key not in cache:
result = func(*args, **kwargs)
cache[key] = result
if len(cache) > maxsize:
cache.popitem(last=False)
else:
result = cache[key]
return result
def clear_cache():
cache.clear()
wrapper.clear_cache = clear_cache # type: ignore
wrapper.cache = cache # type: ignore
return wrapper
return decorator
PREFIX = "langflow_cache"
@create_cache_folder
def clear_old_cache_files(max_cache_size: int = 3):
cache_dir = Path(tempfile.gettempdir()) / PREFIX
cache_files = list(cache_dir.glob("*.dill"))
if len(cache_files) > max_cache_size:
cache_files_sorted_by_mtime = sorted(
cache_files, key=lambda x: x.stat().st_mtime, reverse=True
)
for cache_file in cache_files_sorted_by_mtime[max_cache_size:]:
with contextlib.suppress(OSError):
os.remove(cache_file)
def compute_dict_hash(graph_data):
graph_data = filter_json(graph_data)
cleaned_graph_json = json.dumps(graph_data, sort_keys=True)
return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest()
def filter_json(json_data):
filtered_data = json_data.copy()
# Remove 'viewport' and 'chatHistory' keys
if "viewport" in filtered_data:
del filtered_data["viewport"]
if "chatHistory" in filtered_data:
del filtered_data["chatHistory"]
# Filter nodes
if "nodes" in filtered_data:
for node in filtered_data["nodes"]:
if "position" in node:
del node["position"]
if "positionAbsolute" in node:
del node["positionAbsolute"]
if "selected" in node:
del node["selected"]
if "dragging" in node:
del node["dragging"]
return filtered_data
@create_cache_folder
def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> str:
class BaseCache(abc.ABC):
"""
Save a binary file to the specified folder.
Args:
content: The content of the file as a bytes object.
file_name: The name of the file, including its extension.
Returns:
The path to the saved file.
Abstract base class for a cache.
"""
if not any(file_name.endswith(suffix) for suffix in accepted_types):
raise ValueError(f"File {file_name} is not accepted")
# Get the destination folder
cache_path = Path(tempfile.gettempdir()) / PREFIX
if not content:
raise ValueError("Please, reload the file in the loader.")
data = content.split(",")[1]
decoded_bytes = base64.b64decode(data)
@abc.abstractmethod
def get(self, key):
"""
Retrieve an item from the cache.
# Create the full file path
file_path = os.path.join(cache_path, file_name)
Args:
key: The key of the item to retrieve.
# Save the binary content to the file
with open(file_path, "wb") as file:
file.write(decoded_bytes)
Returns:
The value associated with the key, or None if the key is not found.
"""
return file_path
@abc.abstractmethod
def set(self, key, value):
"""
Add an item to the cache.
Args:
key: The key of the item.
value: The value to cache.
"""
@create_cache_folder
def save_cache(hash_val: str, chat_data, clean_old_cache_files: bool):
cache_path = Path(tempfile.gettempdir()) / PREFIX / f"{hash_val}.dill"
with cache_path.open("wb") as cache_file:
dill.dump(chat_data, cache_file)
@abc.abstractmethod
def delete(self, key):
"""
Remove an item from the cache.
if clean_old_cache_files:
clear_old_cache_files()
Args:
key: The key of the item to remove.
"""
@abc.abstractmethod
def clear(self):
"""
Clear all items from the cache.
"""
@create_cache_folder
def load_cache(hash_val):
cache_path = Path(tempfile.gettempdir()) / PREFIX / f"{hash_val}.dill"
if cache_path.exists():
with cache_path.open("rb") as cache_file:
return dill.load(cache_file)
return None
@abc.abstractmethod
def __contains__(self, key):
"""
Check if the key is in the cache.
Args:
key: The key of the item to check.
Returns:
True if the key is in the cache, False otherwise.
"""
@abc.abstractmethod
def __getitem__(self, key):
"""
Retrieve an item from the cache using the square bracket notation.
Args:
key: The key of the item to retrieve.
Returns:
The value associated with the key, or None if the key is not found.
"""
@abc.abstractmethod
def __setitem__(self, key, value):
"""
Add an item to the cache using the square bracket notation.
Args:
key: The key of the item.
value: The value to cache.
"""
@abc.abstractmethod
def __delitem__(self, key):
"""
Remove an item from the cache using the square bracket notation.
Args:
key: The key of the item to remove.
"""

146
src/backend/langflow/cache/flow.py vendored Normal file
View file

@ -0,0 +1,146 @@
import threading
import time
from collections import OrderedDict
from langflow.cache.base import BaseCache
class InMemoryCache(BaseCache):
"""
A simple in-memory cache using an OrderedDict.
This cache supports setting a maximum size and expiration time for cached items.
When the cache is full, it uses a Least Recently Used (LRU) eviction policy.
Thread-safe using a threading Lock.
Attributes:
max_size (int, optional): Maximum number of items to store in the cache.
expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour.
Example:
cache = InMemoryCache(max_size=3, expiration_time=5)
# setting cache values
cache.set("a", 1)
cache.set("b", 2)
cache["c"] = 3
# getting cache values
a = cache.get("a")
b = cache["b"]
"""
def __init__(self, max_size=None, expiration_time=60 * 60):
"""
Initialize a new InMemoryCache instance.
Args:
max_size (int, optional): Maximum number of items to store in the cache.
expiration_time (int, optional): Time in seconds after which a cached item expires. Default is 1 hour.
"""
self._cache = OrderedDict()
self._lock = threading.Lock()
self.max_size = max_size
self.expiration_time = expiration_time
def get(self, key):
"""
Retrieve an item from the cache.
Args:
key: The key of the item to retrieve.
Returns:
The value associated with the key, or None if the key is not found or the item has expired.
"""
with self._lock:
if key in self._cache:
item = self._cache.pop(key)
if (
self.expiration_time is None
or time.time() - item["time"] < self.expiration_time
):
# Move the key to the end to make it recently used
self._cache[key] = item
return item["value"]
else:
self.delete(key)
return None
def set(self, key, value):
"""
Add an item to the cache.
If the cache is full, the least recently used item is evicted.
Args:
key: The key of the item.
value: The value to cache.
"""
with self._lock:
if key in self._cache:
# Remove existing key before re-inserting to update order
self.delete(key)
elif self.max_size and len(self._cache) >= self.max_size:
# Remove least recently used item
self._cache.popitem(last=False)
self._cache[key] = {"value": value, "time": time.time()}
def get_or_set(self, key, value):
"""
Retrieve an item from the cache. If the item does not exist, set it with the provided value.
Args:
key: The key of the item.
value: The value to cache if the item doesn't exist.
Returns:
The cached value associated with the key.
"""
with self._lock:
if key in self._cache:
return self.get(key)
self.set(key, value)
return value
def delete(self, key):
"""
Remove an item from the cache.
Args:
key: The key of the item to remove.
"""
        # No lock acquired here on purpose: delete() is called from get()/set()
        # while self._lock is already held, and threading.Lock is not reentrant.
self._cache.pop(key, None)
def clear(self):
"""
Clear all items from the cache.
"""
with self._lock:
self._cache.clear()
def __contains__(self, key):
"""Check if the key is in the cache."""
return key in self._cache
def __getitem__(self, key):
"""Retrieve an item from the cache using the square bracket notation."""
return self.get(key)
def __setitem__(self, key, value):
"""Add an item to the cache using the square bracket notation."""
self.set(key, value)
def __delitem__(self, key):
"""Remove an item from the cache using the square bracket notation."""
self.delete(key)
def __len__(self):
"""Return the number of items in the cache."""
return len(self._cache)
def __repr__(self):
"""Return a string representation of the InMemoryCache instance."""
return f"InMemoryCache(max_size={self.max_size}, expiration_time={self.expiration_time})"

View file

@ -54,7 +54,7 @@ class CacheManager(Subject):
def __init__(self):
super().__init__()
self.CACHE = {}
self._cache = {}
self.current_client_id = None
self.current_cache = {}
@ -68,12 +68,12 @@ class CacheManager(Subject):
"""
previous_client_id = self.current_client_id
self.current_client_id = client_id
self.current_cache = self.CACHE.setdefault(client_id, {})
self.current_cache = self._cache.setdefault(client_id, {})
try:
yield
finally:
self.current_client_id = previous_client_id
self.current_cache = self.CACHE.get(self.current_client_id, {})
self.current_cache = self._cache.get(self.current_client_id, {})
def add(self, name: str, obj: Any, obj_type: str, extension: Optional[str] = None):
"""

134
src/backend/langflow/cache/utils.py vendored Normal file
View file

@ -0,0 +1,134 @@
import base64
import contextlib
import functools
import hashlib
import json
import os
import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import Any, Dict
CACHE: Dict[str, Any] = {}
def create_cache_folder(func):
def wrapper(*args, **kwargs):
# Get the destination folder
cache_path = Path(tempfile.gettempdir()) / PREFIX
# Create the destination folder if it doesn't exist
os.makedirs(cache_path, exist_ok=True)
return func(*args, **kwargs)
return wrapper
def memoize_dict(maxsize=128):
cache = OrderedDict()
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
hashed = compute_dict_hash(args[0])
key = (func.__name__, hashed, frozenset(kwargs.items()))
if key not in cache:
result = func(*args, **kwargs)
cache[key] = result
if len(cache) > maxsize:
cache.popitem(last=False)
else:
result = cache[key]
return result
def clear_cache():
cache.clear()
wrapper.clear_cache = clear_cache # type: ignore
wrapper.cache = cache # type: ignore
return wrapper
return decorator
PREFIX = "langflow_cache"
@create_cache_folder
def clear_old_cache_files(max_cache_size: int = 3):
cache_dir = Path(tempfile.gettempdir()) / PREFIX
cache_files = list(cache_dir.glob("*.dill"))
if len(cache_files) > max_cache_size:
cache_files_sorted_by_mtime = sorted(
cache_files, key=lambda x: x.stat().st_mtime, reverse=True
)
for cache_file in cache_files_sorted_by_mtime[max_cache_size:]:
with contextlib.suppress(OSError):
os.remove(cache_file)
def compute_dict_hash(graph_data):
graph_data = filter_json(graph_data)
cleaned_graph_json = json.dumps(graph_data, sort_keys=True)
return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest()
def filter_json(json_data):
filtered_data = json_data.copy()
# Remove 'viewport' and 'chatHistory' keys
if "viewport" in filtered_data:
del filtered_data["viewport"]
if "chatHistory" in filtered_data:
del filtered_data["chatHistory"]
# Filter nodes
if "nodes" in filtered_data:
for node in filtered_data["nodes"]:
if "position" in node:
del node["position"]
if "positionAbsolute" in node:
del node["positionAbsolute"]
if "selected" in node:
del node["selected"]
if "dragging" in node:
del node["dragging"]
return filtered_data
@create_cache_folder
def save_binary_file(content: str, file_name: str, accepted_types: list[str]) -> str:
"""
Save a binary file to the specified folder.
Args:
content: The content of the file as a bytes object.
file_name: The name of the file, including its extension.
Returns:
The path to the saved file.
"""
if not any(file_name.endswith(suffix) for suffix in accepted_types):
raise ValueError(f"File {file_name} is not accepted")
# Get the destination folder
cache_path = Path(tempfile.gettempdir()) / PREFIX
if not content:
raise ValueError("Please, reload the file in the loader.")
data = content.split(",")[1]
decoded_bytes = base64.b64decode(data)
# Create the full file path
file_path = os.path.join(cache_path, file_name)
# Save the binary content to the file
with open(file_path, "wb") as file:
file.write(decoded_bytes)
return file_path
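# --- Illustrative sketch (not part of this commit) ----------------------------
# A hedged example of memoize_dict: results are cached by a hash of the first
# (dict) argument, so repeated calls with an equivalent graph payload reuse the
# earlier result. The function below is a stand-in for a costly build step.
if __name__ == "__main__":  # example usage only
    calls = {"count": 0}

    @memoize_dict(maxsize=16)
    def expensive_build(graph_data: dict) -> str:
        calls["count"] += 1
        return compute_dict_hash(graph_data)

    payload = {"nodes": [], "edges": []}
    assert expensive_build(payload) == expensive_build(payload)
    assert calls["count"] == 1  # second call served from the cache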

View file

View file

@ -1,21 +1,20 @@
import asyncio
import json
from collections import defaultdict
from typing import Dict, List
from fastapi import WebSocket, status
from langflow.api.schemas import ChatMessage, ChatResponse, FileResponse
from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse
from langflow.cache import cache_manager
from langflow.cache.manager import Subject
from langflow.interface.run import (
get_result_and_steps,
load_or_build_langchain_object,
)
from langflow.interface.utils import pil_to_base64, try_setting_streaming_options
from langflow.chat.utils import process_graph
from langflow.interface.utils import pil_to_base64
from langflow.utils.logger import logger
import asyncio
import json
from typing import Any, Dict, List
from langflow.cache.flow import InMemoryCache
class ChatHistory(Subject):
def __init__(self):
super().__init__()
@ -49,6 +48,7 @@ class ChatManager:
self.chat_history = ChatHistory()
self.cache_manager = cache_manager
self.cache_manager.attach(self.update)
self.in_memory_cache = InMemoryCache()
def on_chat_history_update(self):
"""Send the last chat message to the client."""
@ -102,24 +102,30 @@ class ChatManager:
websocket = self.active_connections[client_id]
await websocket.send_json(message.dict())
async def process_message(self, client_id: str, payload: Dict):
async def close_connection(self, client_id: str, code: int, reason: str):
if websocket := self.active_connections[client_id]:
await websocket.close(code=code, reason=reason)
self.disconnect(client_id)
async def process_message(
self, client_id: str, payload: Dict, langchain_object: Any
):
# Process the graph data and chat message
chat_message = payload.pop("message", "")
chat_message = ChatMessage(message=chat_message)
self.chat_history.add_message(client_id, chat_message)
graph_data = payload
# graph_data = payload
start_resp = ChatResponse(message=None, type="start", intermediate_steps="")
await self.send_json(client_id, start_resp)
is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1
# is_first_message = len(self.chat_history.get_history(client_id=client_id)) <= 1
# Generate result and thought
try:
logger.debug("Generating result and thought")
result, intermediate_steps = await process_graph(
graph_data=graph_data,
is_first_message=is_first_message,
langchain_object=langchain_object,
chat_message=chat_message,
websocket=self.active_connections[client_id],
)
@ -152,6 +158,14 @@ class ChatManager:
await self.send_json(client_id, response)
self.chat_history.add_message(client_id, response)
def set_cache(self, client_id: str, langchain_object: Any) -> bool:
"""
Set the cache for a client.
"""
self.in_memory_cache.set(client_id, langchain_object)
return client_id in self.in_memory_cache
async def handle_websocket(self, client_id: str, websocket: WebSocket):
await self.connect(client_id, websocket)
@ -172,52 +186,24 @@ class ChatManager:
continue
with self.cache_manager.set_client_id(client_id):
await self.process_message(client_id, payload)
langchain_object = self.in_memory_cache.get(client_id)
await self.process_message(client_id, payload, langchain_object)
except Exception as e:
# Handle any exceptions that might occur
logger.exception(e)
# send a message to the client
await self.active_connections[client_id].close(
code=status.WS_1011_INTERNAL_ERROR, reason=str(e)[:120]
logger.error(e)
await self.close_connection(
client_id=client_id,
code=status.WS_1011_INTERNAL_ERROR,
reason=str(e)[:120],
)
self.disconnect(client_id)
finally:
try:
connection = self.active_connections.get(client_id)
if connection:
await connection.close(code=1000, reason="Client disconnected")
self.disconnect(client_id)
await self.close_connection(
client_id=client_id,
code=status.WS_1000_NORMAL_CLOSURE,
reason="Client disconnected",
)
except Exception as e:
logger.exception(e)
logger.error(e)
self.disconnect(client_id)
async def process_graph(
graph_data: Dict,
is_first_message: bool,
chat_message: ChatMessage,
websocket: WebSocket,
):
langchain_object = load_or_build_langchain_object(graph_data, is_first_message)
langchain_object = try_setting_streaming_options(langchain_object, websocket)
logger.debug("Loaded langchain object")
if langchain_object is None:
# Raise user facing error
raise ValueError(
"There was an error loading the langchain_object. Please, check all the nodes and try again."
)
# Generate result and thought
try:
logger.debug("Generating result and thought")
result, intermediate_steps = await get_result_and_steps(
langchain_object, chat_message.message or "", websocket=websocket
)
logger.debug("Generated result and intermediate_steps")
return result, intermediate_steps
except Exception as e:
# Log stack trace
logger.exception(e)
raise e

View file

@ -0,0 +1,33 @@
from fastapi import WebSocket
from langflow.api.v1.schemas import ChatMessage
from langflow.processing.base import get_result_and_steps
from langflow.interface.utils import try_setting_streaming_options
from langflow.utils.logger import logger
async def process_graph(
langchain_object,
chat_message: ChatMessage,
websocket: WebSocket,
):
langchain_object = try_setting_streaming_options(langchain_object, websocket)
logger.debug("Loaded langchain object")
if langchain_object is None:
# Raise user facing error
raise ValueError(
"There was an error loading the langchain_object. Please, check all the nodes and try again."
)
# Generate result and thought
try:
logger.debug("Generating result and thought")
result, intermediate_steps = await get_result_and_steps(
langchain_object, chat_message.message or "", websocket=websocket
)
logger.debug("Generated result and intermediate_steps")
return result, intermediate_steps
except Exception as e:
# Log stack trace
logger.exception(e)
raise e

View file

@ -55,10 +55,13 @@ embeddings:
llms:
- OpenAI
# - AzureOpenAI
# - AzureChatOpenAI
- ChatOpenAI
- LlamaCpp
- LlamaCpp
- CTransformers
- Cohere
- Anthropic
- ChatAnthropic
- HuggingFaceHub
memories:
- ConversationBufferMemory
@ -70,7 +73,7 @@ prompts:
- ZeroShotPrompt
textsplitters:
- CharacterTextSplitter
# - RecursiveCharacterTextSplitter
- RecursiveCharacterTextSplitter
# - LatexTextSplitter
# - PythonCodeTextSplitter
toolkits:
@ -78,12 +81,14 @@ toolkits:
- JsonToolkit
- VectorStoreInfo
- VectorStoreRouterToolkit
- VectorStoreToolkit
tools:
- Search
- PAL-MATH
- Calculator
- Serper Search
- Tool
- PythonFunctionTool
- PythonFunction
- JsonSpec
- News API

View file

@ -2,8 +2,11 @@ from langflow.template import frontend_node
# These should always be instantiated
CUSTOM_NODES = {
"prompts": {"ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode()},
"prompts": {
"ZeroShotPrompt": frontend_node.prompts.ZeroShotPromptNode(),
},
"tools": {
"PythonFunctionTool": frontend_node.tools.PythonFunctionToolNode(),
"PythonFunction": frontend_node.tools.PythonFunctionNode(),
"Tool": frontend_node.tools.ToolNode(),
},

View file

@ -0,0 +1,18 @@
from langflow.settings import settings
from sqlmodel import SQLModel, Session, create_engine
if settings.database_url.startswith("sqlite"):
connect_args = {"check_same_thread": False}
else:
connect_args = {}
engine = create_engine(settings.database_url, connect_args=connect_args)
def create_db_and_tables():
SQLModel.metadata.create_all(engine)
def get_session():
with Session(engine) as session:
yield session
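# --- Illustrative sketch (not part of this commit) ----------------------------
# A hedged example of using these helpers outside FastAPI dependency injection,
# e.g. in a one-off script: create the tables, then open a session directly.
if __name__ == "__main__":  # example usage only
    create_db_and_tables()
    with Session(engine) as session:
        pass  # run queries against `session` here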

View file

@ -0,0 +1,14 @@
from sqlmodel import SQLModel
import orjson
def orjson_dumps(v, *, default):
# orjson.dumps returns bytes, to match standard json.dumps we need to decode
return orjson.dumps(v, default=default).decode()
class SQLModelSerializable(SQLModel):
class Config:
orm_mode = True
json_loads = orjson.loads
json_dumps = orjson_dumps
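
A small sketch (not part of this diff) of why the decode is needed: orjson.dumps returns bytes, while the json_dumps hook is expected to return str:

import orjson

payload = {"name": "flow", "data": {"nodes": [], "edges": []}}
raw = orjson.dumps(payload)   # bytes, e.g. b'{"name":"flow",...}'
text = raw.decode()           # str, matching what json.dumps would return
assert isinstance(raw, bytes) and isinstance(text, str)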

View file

@ -0,0 +1,60 @@
# Path: src/backend/langflow/database/models/flow.py
from langflow.database.models.base import SQLModelSerializable
from pydantic import validator
from sqlmodel import Field, Relationship, JSON, Column
from uuid import UUID, uuid4
from typing import Dict, Optional
# if TYPE_CHECKING:
from langflow.database.models.flow_style import FlowStyle, FlowStyleRead
class FlowBase(SQLModelSerializable):
name: str = Field(index=True)
description: Optional[str] = Field(index=True)
data: Optional[Dict] = Field(default=None)
@validator("data")
def validate_json(v):
# dict_keys(['description', 'name', 'id', 'data'])
if not v:
return v
if not isinstance(v, dict):
raise ValueError("Flow must be a valid JSON")
# data must contain nodes and edges
if "nodes" not in v.keys():
raise ValueError("Flow must have nodes")
if "edges" not in v.keys():
raise ValueError("Flow must have edges")
return v
class Flow(FlowBase, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
data: Optional[Dict] = Field(default=None, sa_column=Column(JSON))
style: Optional["FlowStyle"] = Relationship(
back_populates="flow",
# use "uselist=False" to make it a one-to-one relationship
sa_relationship_kwargs={"uselist": False},
)
class FlowCreate(FlowBase):
pass
class FlowRead(FlowBase):
id: UUID
class FlowReadWithStyle(FlowRead):
style: Optional["FlowStyleRead"] = None
class FlowUpdate(SQLModelSerializable):
name: Optional[str] = None
description: Optional[str] = None
data: Optional[Dict] = None
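
A hedged sketch (not part of this diff) of what the data validator accepts and rejects; names and values are illustrative:

from langflow.database.models.flow import FlowCreate

# Accepted: data is a dict containing both "nodes" and "edges".
valid_flow = FlowCreate(name="My flow", data={"nodes": [], "edges": []})

# Rejected: "edges" is missing, so pydantic surfaces "Flow must have edges"
# as a validation error.
try:
    FlowCreate(name="Broken flow", data={"nodes": []})
except ValueError as exc:
    print(exc)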

View file

@ -0,0 +1,33 @@
# Path: src/backend/langflow/database/models/flowstyle.py
from langflow.database.models.base import SQLModelSerializable
from sqlmodel import Field, Relationship
from uuid import UUID, uuid4
from typing import TYPE_CHECKING, Optional
if TYPE_CHECKING:
from langflow.database.models.flow import Flow
class FlowStyleBase(SQLModelSerializable):
color: str
emoji: str
flow_id: UUID = Field(default=None, foreign_key="flow.id")
class FlowStyle(FlowStyleBase, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
flow: "Flow" = Relationship(back_populates="style")
class FlowStyleUpdate(SQLModelSerializable):
color: Optional[str] = None
emoji: Optional[str] = None
class FlowStyleCreate(FlowStyleBase):
pass
class FlowStyleRead(FlowStyleBase):
id: UUID

View file

@ -1,4 +1,35 @@
from langflow.graph.base import Edge, Node
from langflow.graph.graph import Graph
from langflow.graph.edge.base import Edge
from langflow.graph.graph.base import Graph
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import (
AgentVertex,
ChainVertex,
DocumentLoaderVertex,
EmbeddingVertex,
LLMVertex,
MemoryVertex,
PromptVertex,
TextSplitterVertex,
ToolVertex,
ToolkitVertex,
VectorStoreVertex,
WrapperVertex,
)
__all__ = ["Graph", "Node", "Edge"]
__all__ = [
"Graph",
"Vertex",
"Edge",
"AgentVertex",
"ChainVertex",
"DocumentLoaderVertex",
"EmbeddingVertex",
"LLMVertex",
"MemoryVertex",
"PromptVertex",
"TextSplitterVertex",
"ToolVertex",
"ToolkitVertex",
"VectorStoreVertex",
"WrapperVertex",
]

View file

@ -0,0 +1,52 @@
from langflow.utils.logger import logger
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
class Edge:
def __init__(self, source: "Vertex", target: "Vertex"):
self.source: "Vertex" = source
self.target: "Vertex" = target
self.validate_edge()
def validate_edge(self) -> None:
# Validate that the outputs of the source node are valid inputs
# for the target node
self.source_types = self.source.output
self.target_reqs = self.target.required_inputs + self.target.optional_inputs
# Both lists contain strings and sometimes a string contains the value we are
# looking for, e.g. source_types=["Chain"] and target_reqs=["LLMChain"],
# so we need to check if any of the strings in source_types is in target_reqs
self.valid = any(
output in target_req
for output in self.source_types
for target_req in self.target_reqs
)
# Get what type of input the target node is expecting
self.matched_type = next(
(
output
for output in self.source_types
for target_req in self.target_reqs
if output in target_req
),
None,
)
no_matched_type = self.matched_type is None
if no_matched_type:
logger.debug(self.source_types)
logger.debug(self.target_reqs)
if no_matched_type:
raise ValueError(
f"Edge between {self.source.vertex_type} and {self.target.vertex_type} "
f"has no matched type"
)
def __repr__(self) -> str:
return (
f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
f", matched_type={self.matched_type})"
)
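
A toy illustration (not part of this diff) of the substring matching rule used above, with plain lists instead of real vertices:

source_types = ["Chain"]              # what the source vertex outputs
target_reqs = ["LLMChain", "memory"]  # what the target vertex accepts

matched_type = next(
    (output for output in source_types for req in target_reqs if output in req),
    None,
)
assert matched_type == "Chain"  # "Chain" is a substring of "LLMChain", so the edge is valid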

View file

@ -1,166 +0,0 @@
from typing import Dict, List, Type, Union
from langflow.graph.base import Edge, Node
from langflow.graph.nodes import (
AgentNode,
ChainNode,
DocumentLoaderNode,
EmbeddingNode,
FileToolNode,
LLMNode,
MemoryNode,
PromptNode,
TextSplitterNode,
ToolkitNode,
ToolNode,
VectorStoreNode,
WrapperNode,
)
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.document_loaders.base import documentloader_creator
from langflow.interface.embeddings.base import embedding_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.memories.base import memory_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.text_splitters.base import textsplitter_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
from langflow.utils import payload
class Graph:
def __init__(
self,
nodes: List[Dict[str, Union[str, Dict[str, Union[str, List[str]]]]]],
edges: List[Dict[str, str]],
) -> None:
self._nodes = nodes
self._edges = edges
self._build_graph()
def _build_graph(self) -> None:
self.nodes = self._build_nodes()
self.edges = self._build_edges()
for edge in self.edges:
edge.source.add_edge(edge)
edge.target.add_edge(edge)
# This is a hack to make sure that the LLM node is sent to
# the toolkit node
llm_node = None
for node in self.nodes:
node._build_params()
if isinstance(node, LLMNode):
llm_node = node
for node in self.nodes:
if isinstance(node, ToolkitNode):
node.params["llm"] = llm_node
# remove invalid nodes
self.nodes = [
node
for node in self.nodes
if self._validate_node(node)
or (len(self.nodes) == 1 and len(self.edges) == 0)
]
def _validate_node(self, node: Node) -> bool:
# All nodes that do not have edges are invalid
return len(node.edges) > 0
def get_node(self, node_id: str) -> Union[None, Node]:
return next((node for node in self.nodes if node.id == node_id), None)
def get_nodes_with_target(self, node: Node) -> List[Node]:
connected_nodes: List[Node] = [
edge.source for edge in self.edges if edge.target == node
]
return connected_nodes
def build(self) -> List[Node]:
# Get root node
root_node = payload.get_root_node(self)
if root_node is None:
raise ValueError("No root node found")
return root_node.build()
def get_node_neighbors(self, node: Node) -> Dict[Node, int]:
neighbors: Dict[Node, int] = {}
for edge in self.edges:
if edge.source == node:
neighbor = edge.target
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
elif edge.target == node:
neighbor = edge.source
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
return neighbors
def _build_edges(self) -> List[Edge]:
# Edge takes two nodes as arguments, so we need to build the nodes first
# and then build the edges
# if we can't find a node, we raise an error
edges: List[Edge] = []
for edge in self._edges:
source = self.get_node(edge["source"])
target = self.get_node(edge["target"])
if source is None:
raise ValueError(f"Source node {edge['source']} not found")
if target is None:
raise ValueError(f"Target node {edge['target']} not found")
edges.append(Edge(source, target))
return edges
def _get_node_class(self, node_type: str, node_lc_type: str) -> Type[Node]:
node_type_map: Dict[str, Type[Node]] = {
**{t: PromptNode for t in prompt_creator.to_list()},
**{t: AgentNode for t in agent_creator.to_list()},
**{t: ChainNode for t in chain_creator.to_list()},
**{t: ToolNode for t in tool_creator.to_list()},
**{t: ToolkitNode for t in toolkits_creator.to_list()},
**{t: WrapperNode for t in wrapper_creator.to_list()},
**{t: LLMNode for t in llm_creator.to_list()},
**{t: MemoryNode for t in memory_creator.to_list()},
**{t: EmbeddingNode for t in embedding_creator.to_list()},
**{t: VectorStoreNode for t in vectorstore_creator.to_list()},
**{t: DocumentLoaderNode for t in documentloader_creator.to_list()},
**{t: TextSplitterNode for t in textsplitter_creator.to_list()},
}
if node_type in FILE_TOOLS:
return FileToolNode
if node_type in node_type_map:
return node_type_map[node_type]
if node_lc_type in node_type_map:
return node_type_map[node_lc_type]
return Node
def _build_nodes(self) -> List[Node]:
nodes: List[Node] = []
for node in self._nodes:
node_data = node["data"]
node_type: str = node_data["type"] # type: ignore
node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore
NodeClass = self._get_node_class(node_type, node_lc_type)
nodes.append(NodeClass(node))
return nodes
def get_children_by_node_type(self, node: Node, node_type: str) -> List[Node]:
children = []
node_types = [node.data["type"]]
if "node" in node.data:
node_types += node.data["node"]["base_classes"]
if node_type in node_types:
children.append(node)
return children

View file

@ -0,0 +1,215 @@
from typing import Dict, Generator, List, Type, Union
from langflow.graph.edge.base import Edge
from langflow.graph.graph.constants import VERTEX_TYPE_MAP
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import (
FileToolVertex,
LLMVertex,
ToolkitVertex,
)
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.utils import payload
from langflow.utils.logger import logger
class Graph:
"""A class representing a graph of nodes and edges."""
def __init__(
self,
nodes: List[Dict[str, Union[str, Dict[str, Union[str, List[str]]]]]],
edges: List[Dict[str, str]],
) -> None:
self._nodes = nodes
self._edges = edges
self._build_graph()
@classmethod
def from_payload(cls, payload: Dict) -> "Graph":
"""
Creates a graph from a payload.
Args:
payload (Dict): The payload to create the graph from.
Returns:
Graph: The created graph.
"""
if "data" in payload:
payload = payload["data"]
try:
nodes = payload["nodes"]
edges = payload["edges"]
return cls(nodes, edges)
except KeyError as exc:
raise ValueError(
f"Invalid payload. Expected keys 'nodes' and 'edges'. Found {list(payload.keys())}"
) from exc
def _build_graph(self) -> None:
"""Builds the graph from the nodes and edges."""
self.nodes = self._build_vertices()
self.edges = self._build_edges()
for edge in self.edges:
edge.source.add_edge(edge)
edge.target.add_edge(edge)
# This is a hack to make sure that the LLM node is sent to
# the toolkit node
self._build_node_params()
# remove invalid nodes
self._remove_invalid_nodes()
def _build_node_params(self) -> None:
"""Identifies and handles the LLM node within the graph."""
llm_node = None
for node in self.nodes:
node._build_params()
if isinstance(node, LLMVertex):
llm_node = node
if llm_node:
for node in self.nodes:
if isinstance(node, ToolkitVertex):
node.params["llm"] = llm_node
def _remove_invalid_nodes(self) -> None:
"""Removes invalid nodes from the graph."""
self.nodes = [
node
for node in self.nodes
if self._validate_node(node)
or (len(self.nodes) == 1 and len(self.edges) == 0)
]
def _validate_node(self, node: Vertex) -> bool:
"""Validates a node."""
# All nodes that do not have edges are invalid
return len(node.edges) > 0
def get_node(self, node_id: str) -> Union[None, Vertex]:
"""Returns a node by id."""
return next((node for node in self.nodes if node.id == node_id), None)
def get_nodes_with_target(self, node: Vertex) -> List[Vertex]:
"""Returns the nodes connected to a node."""
connected_nodes: List[Vertex] = [
edge.source for edge in self.edges if edge.target == node
]
return connected_nodes
def build(self) -> List[Vertex]:
"""Builds the graph."""
# Get root node
root_node = payload.get_root_node(self)
if root_node is None:
raise ValueError("No root node found")
return root_node.build()
def topological_sort(self) -> List[Vertex]:
"""
Performs a topological sort of the vertices in the graph.
Returns:
List[Vertex]: A list of vertices in topological order.
Raises:
ValueError: If the graph contains a cycle.
"""
# States: 0 = unvisited, 1 = visiting, 2 = visited
state = {node: 0 for node in self.nodes}
sorted_vertices = []
def dfs(node):
if state[node] == 1:
# We have a cycle
raise ValueError(
"Graph contains a cycle, cannot perform topological sort"
)
if state[node] == 0:
state[node] = 1
for edge in node.edges:
if edge.source == node:
dfs(edge.target)
state[node] = 2
sorted_vertices.append(node)
# Visit each node
for node in self.nodes:
if state[node] == 0:
dfs(node)
return list(reversed(sorted_vertices))
def generator_build(self) -> Generator:
"""Builds each vertex in the graph and yields it."""
sorted_vertices = self.topological_sort()
logger.info("Sorted vertices: %s", sorted_vertices)
yield from sorted_vertices
def get_node_neighbors(self, node: Vertex) -> Dict[Vertex, int]:
"""Returns the neighbors of a node."""
neighbors: Dict[Vertex, int] = {}
for edge in self.edges:
if edge.source == node:
neighbor = edge.target
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
elif edge.target == node:
neighbor = edge.source
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
return neighbors
def _build_edges(self) -> List[Edge]:
"""Builds the edges of the graph."""
# Edge takes two nodes as arguments, so we need to build the nodes first
# and then build the edges
# if we can't find a node, we raise an error
edges: List[Edge] = []
for edge in self._edges:
source = self.get_node(edge["source"])
target = self.get_node(edge["target"])
if source is None:
raise ValueError(f"Source node {edge['source']} not found")
if target is None:
raise ValueError(f"Target node {edge['target']} not found")
edges.append(Edge(source, target))
return edges
def _get_vertex_class(self, node_type: str, node_lc_type: str) -> Type[Vertex]:
"""Returns the node class based on the node type."""
if node_type in FILE_TOOLS:
return FileToolVertex
if node_type in VERTEX_TYPE_MAP:
return VERTEX_TYPE_MAP[node_type]
return (
VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex
)
def _build_vertices(self) -> List[Vertex]:
"""Builds the vertices of the graph."""
nodes: List[Vertex] = []
for node in self._nodes:
node_data = node["data"]
node_type: str = node_data["type"] # type: ignore
node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore
VertexClass = self._get_vertex_class(node_type, node_lc_type)
nodes.append(VertexClass(node))
return nodes
def get_children_by_node_type(self, node: Vertex, node_type: str) -> List[Vertex]:
"""Returns the children of a node based on the node type."""
children = []
node_types = [node.data["type"]]
if "node" in node.data:
node_types += node.data["node"]["base_classes"]
if node_type in node_types:
children.append(node)
return children
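
A hedged usage sketch (not part of this diff); flow_payload is assumed to be the dict exported by the frontend, with "nodes" and "edges" under an optional "data" key:

from langflow.graph import Graph

graph = Graph.from_payload(flow_payload)
for vertex in graph.generator_build():
    # vertices are yielded in topological (dependency) order
    print(vertex.vertex_type)
langchain_object = graph.build()  # builds starting from the root vertex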

View file

@ -0,0 +1,49 @@
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import (
AgentVertex,
ChainVertex,
DocumentLoaderVertex,
EmbeddingVertex,
LLMVertex,
MemoryVertex,
PromptVertex,
TextSplitterVertex,
ToolVertex,
ToolkitVertex,
VectorStoreVertex,
WrapperVertex,
)
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.document_loaders.base import documentloader_creator
from langflow.interface.embeddings.base import embedding_creator
from langflow.interface.llms.base import llm_creator
from langflow.interface.memories.base import memory_creator
from langflow.interface.prompts.base import prompt_creator
from langflow.interface.text_splitters.base import textsplitter_creator
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.tools.base import tool_creator
from langflow.interface.vector_store.base import vectorstore_creator
from langflow.interface.wrappers.base import wrapper_creator
from typing import Dict, Type
DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"]
VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = {
**{t: PromptVertex for t in prompt_creator.to_list()},
**{t: AgentVertex for t in agent_creator.to_list()},
**{t: ChainVertex for t in chain_creator.to_list()},
**{t: ToolVertex for t in tool_creator.to_list()},
**{t: ToolkitVertex for t in toolkits_creator.to_list()},
**{t: WrapperVertex for t in wrapper_creator.to_list()},
**{t: LLMVertex for t in llm_creator.to_list()},
**{t: MemoryVertex for t in memory_creator.to_list()},
**{t: EmbeddingVertex for t in embedding_creator.to_list()},
**{t: VectorStoreVertex for t in vectorstore_creator.to_list()},
**{t: DocumentLoaderVertex for t in documentloader_creator.to_list()},
**{t: TextSplitterVertex for t in textsplitter_creator.to_list()},
}

View file

@ -1,4 +1,6 @@
import re
from typing import Any, Union
from langflow.interface.utils import extract_input_variables_from_prompt
def validate_prompt(prompt: str):
@ -14,6 +16,12 @@ def fix_prompt(prompt: str):
return prompt + " {input}"
def extract_input_variables_from_prompt(prompt: str) -> list[str]:
"""Extract input variables from prompt."""
return re.findall(r"{(.*?)}", prompt)
def flatten_list(list_of_lists: list[Union[list, Any]]) -> list:
"""Flatten list of lists."""
new_list = []
for item in list_of_lists:
if isinstance(item, list):
new_list.extend(item)
else:
new_list.append(item)
return new_list
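
A quick illustration (not part of this diff) of flatten_list on a mixed list; the values are arbitrary:

from langflow.graph.utils import flatten_list

tools = [["wikipedia", "calculator"], "python_repl"]
assert flatten_list(tools) == ["wikipedia", "calculator", "python_repl"]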

View file

@ -1,27 +1,27 @@
# Description: Graph class for building a graph of nodes and edges
# Insights:
# - Defer prompts building to the last moment or when they have all the tools
# - Build each inner agent first, then build the outer agent
import contextlib
import inspect
import types
import warnings
from typing import Any, Dict, List, Optional
from langflow.cache import base as cache_utils
from langflow.graph.constants import DIRECT_TYPES
from langflow.cache import utils as cache_utils
from langflow.graph.vertex.constants import DIRECT_TYPES
from langflow.interface import loading
from langflow.interface.listing import ALL_TYPES_DICT
from langflow.utils.logger import logger
from langflow.utils.util import sync_to_async
class Node:
import contextlib
import inspect
import types
import warnings
from typing import Any, Dict, List, Optional
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from langflow.graph.edge.base import Edge
class Vertex:
def __init__(self, data: Dict, base_type: Optional[str] = None) -> None:
self.id: str = data["id"]
self._data = data
self.edges: List[Edge] = []
self.edges: List["Edge"] = []
self.base_type: Optional[str] = base_type
self._parse_data()
self._built_object = None
@ -48,12 +48,15 @@ class Node:
]
template_dict = self.data["node"]["template"]
self.node_type = (
self.data["type"] if "Tool" not in self.output else template_dict["_type"]
self.vertex_type = (
self.data["type"]
if "Tool" not in self.output or template_dict["_type"].islower()
else template_dict["_type"]
)
if self.base_type is None:
for base_type, value in ALL_TYPES_DICT.items():
if self.node_type in value:
if self.vertex_type in value:
self.base_type = base_type
break
@ -113,7 +116,7 @@ class Node:
if value["required"] and not edges:
# If a required parameter is not found, raise an error
raise ValueError(
f"Required input {key} for module {self.node_type} not found"
f"Required input {key} for module {self.vertex_type} not found"
)
elif value["list"]:
# If this is a list parameter, append all sources to a list
@ -128,7 +131,7 @@ class Node:
# so we need to check if value has value
new_value = value.get("value")
if new_value is None:
warnings.warn(f"Value for {key} in {self.node_type} is None. ")
warnings.warn(f"Value for {key} in {self.vertex_type} is None. ")
if value.get("type") == "int":
with contextlib.suppress(TypeError, ValueError):
new_value = int(new_value) # type: ignore
@ -148,12 +151,12 @@ class Node:
# and continue
# Another aspect is that the node_type is the class that we need to import
# and instantiate with these built params
logger.debug(f"Building {self.node_type}")
logger.debug(f"Building {self.vertex_type}")
# Build each node in the params dict
for key, value in self.params.copy().items():
# Check if Node or list of Nodes and not self
# to avoid recursion
if isinstance(value, Node):
if isinstance(value, Vertex):
if value == self:
del self.params[key]
continue
@ -174,10 +177,16 @@ class Node:
# turn result which is a function into a coroutine
# so that it can be awaited
self.params["coroutine"] = sync_to_async(result)
if isinstance(result, list):
# If the result is a list, then we need to extend the list
# with the result but first check if the key exists
# if it doesn't, then we need to create a new list
if isinstance(self.params[key], list):
self.params[key].extend(result)
self.params[key] = result
elif isinstance(value, list) and all(
isinstance(node, Node) for node in value
isinstance(node, Vertex) for node in value
):
self.params[key] = []
for node in value:
@ -193,17 +202,17 @@ class Node:
try:
self._built_object = loading.instantiate_class(
node_type=self.node_type,
node_type=self.vertex_type,
base_type=self.base_type,
params=self.params,
)
except Exception as exc:
raise ValueError(
f"Error building node {self.node_type}: {str(exc)}"
f"Error building node {self.vertex_type}: {str(exc)}"
) from exc
if self._built_object is None:
raise ValueError(f"Node type {self.node_type} not found")
raise ValueError(f"Node type {self.vertex_type} not found")
self._built = True
@ -220,57 +229,10 @@ class Node:
return f"Node(id={self.id}, data={self.data})"
def __eq__(self, __o: object) -> bool:
return self.id == __o.id if isinstance(__o, Node) else False
return self.id == __o.id if isinstance(__o, Vertex) else False
def __hash__(self) -> int:
return id(self)
def _built_object_repr(self):
return repr(self._built_object)
class Edge:
def __init__(self, source: "Node", target: "Node"):
self.source: "Node" = source
self.target: "Node" = target
self.validate_edge()
def validate_edge(self) -> None:
# Validate that the outputs of the source node are valid inputs
# for the target node
self.source_types = self.source.output
self.target_reqs = self.target.required_inputs + self.target.optional_inputs
# Both lists contain strings and sometimes a string contains the value we are
# looking for e.g. comgin_out=["Chain"] and target_reqs=["LLMChain"]
# so we need to check if any of the strings in source_types is in target_reqs
self.valid = any(
output in target_req
for output in self.source_types
for target_req in self.target_reqs
)
# Get what type of input the target node is expecting
self.matched_type = next(
(
output
for output in self.source_types
for target_req in self.target_reqs
if output in target_req
),
None,
)
no_matched_type = self.matched_type is None
if no_matched_type:
logger.debug(self.source_types)
logger.debug(self.target_reqs)
if no_matched_type:
raise ValueError(
f"Edge between {self.source.node_type} and {self.target.node_type} "
f"has no matched type"
)
def __repr__(self) -> str:
return (
f"Edge(source={self.source.id}, target={self.target.id}, valid={self.valid}"
f", matched_type={self.matched_type})"
)

View file

@ -1,22 +1,23 @@
from typing import Any, Dict, List, Optional, Union
from langflow.graph.base import Node
from langflow.graph.utils import extract_input_variables_from_prompt
from langflow.graph.vertex.base import Vertex
from langflow.graph.utils import flatten_list
from langflow.interface.utils import extract_input_variables_from_prompt
class AgentNode(Node):
class AgentVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="agents")
self.tools: List[ToolNode] = []
self.chains: List[ChainNode] = []
self.tools: List[Union[ToolkitVertex, ToolVertex]] = []
self.chains: List[ChainVertex] = []
def _set_tools_and_chains(self) -> None:
for edge in self.edges:
source_node = edge.source
if isinstance(source_node, ToolNode):
if isinstance(source_node, (ToolVertex, ToolkitVertex)):
self.tools.append(source_node)
elif isinstance(source_node, ChainNode):
elif isinstance(source_node, ChainVertex):
self.chains.append(source_node)
def build(self, force: bool = False) -> Any:
@ -32,25 +33,130 @@ class AgentNode(Node):
self._build()
#! Cannot deepcopy VectorStore, VectorStoreRouter, or SQL agents
if self.node_type in ["VectorStoreAgent", "VectorStoreRouterAgent", "SQLAgent"]:
return self._built_object
return self._built_object
class ToolNode(Node):
class ToolVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="tools")
class PromptNode(Node):
class LLMVertex(Vertex):
built_node_type = None
class_built_object = None
def __init__(self, data: Dict):
super().__init__(data, base_type="llms")
def build(self, force: bool = False) -> Any:
# LLM is different because some models might take up too much memory
# or time to load. So we only load them when we need them.
if self.vertex_type == self.built_node_type:
return self.class_built_object
if not self._built or force:
self._build()
self.built_node_type = self.vertex_type
self.class_built_object = self._built_object
# Avoid deepcopying the LLM
# that are loaded from a file
return self._built_object
class ToolkitVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="toolkits")
class FileToolVertex(ToolVertex):
def __init__(self, data: Dict):
super().__init__(data)
class WrapperVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="wrappers")
def build(self, force: bool = False) -> Any:
if not self._built or force:
if "headers" in self.params:
self.params["headers"] = eval(self.params["headers"])
self._build()
return self._built_object
class DocumentLoaderVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="documentloaders")
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
if self._built_object:
return f"""{self.vertex_type}({len(self._built_object)} documents)
Documents: {self._built_object[:3]}..."""
return f"{self.vertex_type}()"
class EmbeddingVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="embeddings")
class VectorStoreVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="vectorstores")
def _built_object_repr(self):
return "Vector stores can take time to build. It will build on the first query."
class MemoryVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="memory")
class TextSplitterVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="textsplitters")
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
if self._built_object:
return f"""{self.vertex_type}({len(self._built_object)} documents)
\nDocuments: {self._built_object[:3]}..."""
return f"{self.vertex_type}()"
class ChainVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="chains")
def build(
self,
force: bool = False,
tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
) -> Any:
if not self._built or force:
# Check if the chain requires a PromptVertex
for key, value in self.params.items():
if isinstance(value, PromptVertex):
# Build the PromptVertex, passing the tools if available
self.params[key] = value.build(tools=tools, force=force)
self._build()
return self._built_object
class PromptVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="prompts")
def build(
self,
force: bool = False,
tools: Optional[Union[List[Node], List[ToolNode]]] = None,
tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
) -> Any:
if not self._built or force:
if (
@ -59,12 +165,16 @@ class PromptNode(Node):
):
self.params["input_variables"] = []
# Check if it is a ZeroShotPrompt and needs a tool
if "ShotPrompt" in self.node_type:
if "ShotPrompt" in self.vertex_type:
tools = (
[tool_node.build() for tool_node in tools]
if tools is not None
else []
)
# flatten the list of tools if it is a list of lists
# first check if it is a list
if tools and isinstance(tools, list) and isinstance(tools[0], list):
tools = flatten_list(tools)
self.params["tools"] = tools
prompt_params = [
key
@ -81,113 +191,3 @@ class PromptNode(Node):
self._build()
return self._built_object
class ChainNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="chains")
def build(
self,
force: bool = False,
tools: Optional[Union[List[Node], List[ToolNode]]] = None,
) -> Any:
if not self._built or force:
# Check if the chain requires a PromptNode
for key, value in self.params.items():
if isinstance(value, PromptNode):
# Build the PromptNode, passing the tools if available
self.params[key] = value.build(tools=tools, force=force)
self._build()
#! Cannot deepcopy SQLDatabaseChain
if self.node_type in ["SQLDatabaseChain"]:
return self._built_object
return self._built_object
class LLMNode(Node):
built_node_type = None
class_built_object = None
def __init__(self, data: Dict):
super().__init__(data, base_type="llms")
def build(self, force: bool = False) -> Any:
# LLM is different because some models might take up too much memory
# or time to load. So we only load them when we need them.ß
if self.node_type == self.built_node_type:
return self.class_built_object
if not self._built or force:
self._build()
self.built_node_type = self.node_type
self.class_built_object = self._built_object
# Avoid deepcopying the LLM
# that are loaded from a file
return self._built_object
class ToolkitNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="toolkits")
class FileToolNode(ToolNode):
def __init__(self, data: Dict):
super().__init__(data)
class WrapperNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="wrappers")
def build(self, force: bool = False) -> Any:
if not self._built or force:
if "headers" in self.params:
self.params["headers"] = eval(self.params["headers"])
self._build()
return self._built_object
class DocumentLoaderNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="documentloaders")
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
if self._built_object:
return f"""{self.node_type}({len(self._built_object)} documents)
Documents: {self._built_object[:3]}..."""
return f"{self.node_type}()"
class EmbeddingNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="embeddings")
class VectorStoreNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="vectorstores")
def _built_object_repr(self):
return "Vector stores can take time to build. It will build on the first query."
class MemoryNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="memory")
class TextSplitterNode(Node):
def __init__(self, data: Dict):
super().__init__(data, base_type="textsplitters")
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
if self._built_object:
return f"""{self.node_type}({len(self._built_object)} documents)\nDocuments: {self._built_object[:3]}..."""
return f"{self.node_type}()"

View file

@ -1,4 +1,3 @@
from abc import ABC
from typing import Any, List, Optional
from langchain import LLMChain
@ -33,24 +32,7 @@ from langchain.memory.chat_memory import BaseChatMemory
from langchain.sql_database import SQLDatabase
from langchain.tools.python.tool import PythonAstREPLTool
from langchain.tools.sql_database.prompt import QUERY_CHECKER
class CustomAgentExecutor(AgentExecutor, ABC):
"""Custom agent executor"""
@staticmethod
def function_name():
return "CustomAgentExecutor"
@classmethod
def initialize(cls, *args, **kwargs):
pass
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)
from langflow.interface.base import CustomAgentExecutor
class JsonAgent(CustomAgentExecutor):
@ -69,7 +51,7 @@ class JsonAgent(CustomAgentExecutor):
@classmethod
def from_toolkit_and_llm(cls, toolkit: JsonToolkit, llm: BaseLanguageModel):
tools = toolkit.get_tools()
tools = toolkit if isinstance(toolkit, list) else toolkit.get_tools()
tool_names = {tool.name for tool in tools}
prompt = ZeroShotAgent.create_prompt(
tools,
@ -142,7 +124,7 @@ class CSVAgent(CustomAgentExecutor):
class VectorStoreAgent(CustomAgentExecutor):
"""Vector Store agent"""
"""Vector store agent"""
@staticmethod
def function_name():
@ -199,7 +181,7 @@ class SQLAgent(CustomAgentExecutor):
def from_toolkit_and_llm(
cls, llm: BaseLanguageModel, database_uri: str, **kwargs: Any
):
"""Construct a sql agent from an LLM and tools."""
"""Construct an SQL agent from an LLM and tools."""
db = SQLDatabase.from_uri(database_uri)
toolkit = SQLDatabaseToolkit(db=db, llm=llm)
@ -278,7 +260,11 @@ class VectorStoreRouterAgent(CustomAgentExecutor):
):
"""Construct a vector store router agent from an LLM and tools."""
tools = vectorstoreroutertoolkit.get_tools()
tools = (
vectorstoreroutertoolkit
if isinstance(vectorstoreroutertoolkit, list)
else vectorstoreroutertoolkit.get_tools()
)
prompt = ZeroShotAgent.create_prompt(tools, prefix=VECTORSTORE_ROUTER_PREFIX)
llm_chain = LLMChain(
llm=llm,

View file

@ -1,6 +1,7 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Type, Union
from langchain.chains.base import Chain
from langchain.agents import AgentExecutor
from pydantic import BaseModel
from langflow.template.field.base import TemplateField
@ -102,3 +103,21 @@ class CustomChain(Chain, ABC):
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)
class CustomAgentExecutor(AgentExecutor, ABC):
"""Custom chain"""
@staticmethod
def function_name():
return "CustomChain"
@classmethod
def initialize(cls, *args, **kwargs):
pass
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def run(self, *args, **kwargs):
return super().run(*args, **kwargs)

View file

@ -20,7 +20,10 @@ class ChainCreator(LangChainTypeCreator):
return ChainFrontendNode
#! We need to find a better solution for this
from_method_nodes = {"ConversationalRetrievalChain": "from_llm"}
from_method_nodes = {
"ConversationalRetrievalChain": "from_llm",
"LLMCheckerChain": "from_llm",
}
@property
def type_to_loader_dict(self) -> Dict:

View file

@ -2,7 +2,6 @@ import inspect
from typing import Any
from langchain import (
chains,
document_loaders,
embeddings,
llms,
@ -11,7 +10,8 @@ from langchain import (
text_splitter,
)
from langchain.agents import agent_toolkits
from langchain.chat_models import ChatOpenAI
from langchain.chat_models import AzureChatOpenAI, ChatOpenAI
from langchain.chat_models import ChatAnthropic
from langflow.interface.importing.utils import import_class
from langflow.interface.agents.custom import CUSTOM_AGENTS
@ -19,13 +19,10 @@ from langflow.interface.chains.custom import CUSTOM_CHAINS
# LLMs
llm_type_to_cls_dict = llms.type_to_cls_dict
llm_type_to_cls_dict["anthropic-chat"] = ChatAnthropic # type: ignore
llm_type_to_cls_dict["azure-chat"] = AzureChatOpenAI # type: ignore
llm_type_to_cls_dict["openai-chat"] = ChatOpenAI # type: ignore
# Chains
chain_type_to_cls_dict: dict[str, Any] = {
chain_name: import_class(f"langchain.chains.{chain_name}")
for chain_name in chains.__all__
}
# Toolkits
toolkit_type_to_loader_dict: dict[str, Any] = {

View file

@ -1,30 +1,20 @@
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.logger import logger
from langflow.utils.util import build_template_from_class
def build_file_path_template(
suffixes: list, fileTypes: list, name: str = "file_path"
) -> Dict:
"""Build a file path template for a document loader."""
return {
"type": "file",
"required": True,
"show": True,
"name": name,
"value": "",
"suffixes": suffixes,
"fileTypes": fileTypes,
}
class DocumentLoaderCreator(LangChainTypeCreator):
type_name: str = "documentloaders"
@property
def frontend_node_class(self) -> Type[DocumentLoaderFrontNode]:
return DocumentLoaderFrontNode
@property
def type_to_loader_dict(self) -> Dict:
return documentloaders_type_to_cls_dict
@ -32,106 +22,7 @@ class DocumentLoaderCreator(LangChainTypeCreator):
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a document loader."""
try:
signature = build_template_from_class(
name, documentloaders_type_to_cls_dict
)
file_path_templates = {
"AirbyteJSONLoader": build_file_path_template(
suffixes=[".json"], fileTypes=["json"]
),
"CoNLLULoader": build_file_path_template(
suffixes=[".csv"], fileTypes=["csv"]
),
"CSVLoader": build_file_path_template(
suffixes=[".csv"], fileTypes=["csv"]
),
"UnstructuredEmailLoader": build_file_path_template(
suffixes=[".eml"], fileTypes=["eml"]
),
"EverNoteLoader": build_file_path_template(
suffixes=[".xml"], fileTypes=["xml"]
),
"FacebookChatLoader": build_file_path_template(
suffixes=[".json"], fileTypes=["json"]
),
"GutenbergLoader": build_file_path_template(
suffixes=[".txt"], fileTypes=["txt"]
),
"BSHTMLLoader": build_file_path_template(
suffixes=[".html"], fileTypes=["html"]
),
"UnstructuredHTMLLoader": build_file_path_template(
suffixes=[".html"], fileTypes=["html"]
),
"UnstructuredImageLoader": build_file_path_template(
suffixes=[".jpg", ".jpeg", ".png", ".gif", ".bmp"],
fileTypes=["jpg", "jpeg", "png", "gif", "bmp"],
),
"UnstructuredMarkdownLoader": build_file_path_template(
suffixes=[".md"], fileTypes=["md"]
),
"PyPDFLoader": build_file_path_template(
suffixes=[".pdf"], fileTypes=["pdf"]
),
"UnstructuredPowerPointLoader": build_file_path_template(
suffixes=[".pptx", ".ppt"], fileTypes=["pptx", "ppt"]
),
"SRTLoader": build_file_path_template(
suffixes=[".srt"], fileTypes=["srt"]
),
"TelegramChatLoader": build_file_path_template(
suffixes=[".json"], fileTypes=["json"]
),
"TextLoader": build_file_path_template(
suffixes=[".txt"], fileTypes=["txt"]
),
"UnstructuredWordDocumentLoader": build_file_path_template(
suffixes=[".docx", ".doc"], fileTypes=["docx", "doc"]
),
"SlackDirectoryLoader": build_file_path_template(
suffixes=[".zip"], fileTypes=["zip"]
),
}
if name in file_path_templates:
signature["template"]["file_path"] = file_path_templates[name]
elif name in {
"WebBaseLoader",
"AZLyricsLoader",
"CollegeConfidentialLoader",
"HNLoader",
"IFixitLoader",
"IMSDbLoader",
}:
signature["template"]["web_path"] = {
"type": "str",
"required": True,
"show": True,
"name": "web_path",
"value": "",
"display_name": "Web Page",
}
elif name in {"GitbookLoader"}:
signature["template"]["web_page"] = {
"type": "str",
"required": True,
"show": True,
"name": "web_page",
"value": "",
"display_name": "Web Page",
}
elif name in {"ReadTheDocsLoader", "NotionDirectoryLoader"}:
signature["template"]["path"] = {
"type": "str",
"required": True,
"show": True,
"name": "path",
"value": "",
"display_name": "Web Page",
}
return signature
return build_template_from_class(name, documentloaders_type_to_cls_dict)
except ValueError as exc:
raise ValueError(f"Documment Loader {name} not found") from exc
except AttributeError as exc:

View file

@ -20,7 +20,7 @@ from langchain.llms.loading import load_llm_from_config
from pydantic import ValidationError
from langflow.interface.custom_lists import CUSTOM_NODES
from langflow.interface.importing.utils import import_by_type
from langflow.interface.importing.utils import get_function, import_by_type
from langflow.interface.toolkits.base import toolkits_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.types import get_type_list
@ -114,6 +114,10 @@ def instantiate_tool(node_type, class_object, params):
if node_type == "JsonSpec":
params["dict_"] = load_file_into_dict(params.pop("path"))
return class_object(**params)
elif node_type == "PythonFunctionTool":
params["func"] = get_function(params.get("code"))
return class_object(**params)
# For backward compatibility
elif node_type == "PythonFunction":
function_string = params["code"]
if isinstance(function_string, str):
@ -126,8 +130,11 @@ def instantiate_tool(node_type, class_object, params):
def instantiate_toolkit(node_type, class_object, params):
loaded_toolkit = class_object(**params)
if toolkits_creator.has_create_function(node_type):
return load_toolkits_executor(node_type, loaded_toolkit, params)
# Commenting this out for now to use toolkits as normal tools
# if toolkits_creator.has_create_function(node_type):
# return load_toolkits_executor(node_type, loaded_toolkit, params)
if isinstance(loaded_toolkit, BaseToolkit):
return loaded_toolkit.get_tools()
return loaded_toolkit
@ -151,7 +158,6 @@ def instantiate_vectorstore(class_object, params):
"The source you provided did not load correctly or was empty."
"This may cause an error in the vectorstore."
)
# Chroma requires all metadata values to not be None
if class_object.__name__ == "Chroma":
for doc in params["documents"]:

View file

@ -3,7 +3,7 @@ from typing import Dict, List, Optional, Type
from langchain.prompts import PromptTemplate
from pydantic import root_validator
from langflow.graph.utils import extract_input_variables_from_prompt
from langflow.interface.utils import extract_input_variables_from_prompt
# Steps to create a BaseCustomPrompt:
# 1. Create a prompt template that ends with:
@ -71,7 +71,3 @@ Human: {input}
CUSTOM_PROMPTS: Dict[str, Type[BaseCustomPrompt]] = {
"SeriesCharacterPrompt": SeriesCharacterPrompt
}
if __name__ == "__main__":
prompt = SeriesCharacterPrompt(character="Harry Potter", series="Harry Potter")
print(prompt.template)

View file

@ -1,38 +1,8 @@
import contextlib
import io
from typing import Any, Dict, List, Tuple
from langchain.schema import AgentAction
from langflow.api.callback import AsyncStreamingLLMCallbackHandler, StreamingLLMCallbackHandler # type: ignore
from langflow.cache.base import compute_dict_hash, load_cache, memoize_dict
from langflow.graph.graph import Graph
from langflow.cache.utils import memoize_dict
from langflow.graph import Graph
from langflow.utils.logger import logger
def load_langchain_object(data_graph, is_first_message=False):
"""
Load langchain object from cache if it exists, otherwise build it.
"""
computed_hash = compute_dict_hash(data_graph)
if is_first_message:
langchain_object = build_langchain_object(data_graph)
else:
logger.debug("Loading langchain object from cache")
langchain_object = load_cache(computed_hash)
return computed_hash, langchain_object
def load_or_build_langchain_object(data_graph, is_first_message=False):
"""
Load langchain object from cache if it exists, otherwise build it.
"""
if is_first_message:
build_langchain_object_with_caching.clear_cache()
return build_langchain_object_with_caching(data_graph)
@memoize_dict(maxsize=10)
def build_langchain_object_with_caching(data_graph):
"""
@ -40,16 +10,10 @@ def build_langchain_object_with_caching(data_graph):
"""
logger.debug("Building langchain object")
graph = build_graph(data_graph)
graph = Graph.from_payload(data_graph)
return graph.build()
def build_graph(data_graph):
nodes = data_graph["nodes"]
edges = data_graph["edges"]
return Graph(nodes, edges)
def build_langchain_object(data_graph):
"""
Build langchain object from data_graph.
@ -66,29 +30,6 @@ def build_langchain_object(data_graph):
return graph.build()
def process_graph_cached(data_graph: Dict[str, Any], message: str):
"""
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate,then run the graph and return the result and thought.
"""
# Load langchain object
is_first_message = len(data_graph.get("chatHistory", [])) == 0
langchain_object = load_or_build_langchain_object(data_graph, is_first_message)
logger.debug("Loaded langchain object")
if langchain_object is None:
# Raise user facing error
raise ValueError(
"There was an error loading the langchain_object. Please, check all the nodes and try again."
)
# Generate result and thought
logger.debug("Generating result and thought")
result, thought = get_result_and_thought(langchain_object, message)
logger.debug("Generated result and thought")
return {"result": str(result), "thought": thought.strip()}
def get_memory_key(langchain_object):
"""
Given a LangChain object, this function retrieves the current memory key from the object's memory attribute.
@ -124,147 +65,3 @@ def update_memory_keys(langchain_object, possible_new_mem_key):
langchain_object.memory.input_key = input_key
langchain_object.memory.output_key = output_key
langchain_object.memory.memory_key = possible_new_mem_key
def fix_memory_inputs(langchain_object):
"""
Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the
object's input variables. If so, it does nothing. Otherwise, it gets a possible new memory key using the
get_memory_key function and updates the memory keys using the update_memory_keys function.
"""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
try:
if langchain_object.memory.memory_key in langchain_object.input_variables:
return
except AttributeError:
input_variables = (
langchain_object.prompt.input_variables
if hasattr(langchain_object, "prompt")
else langchain_object.input_keys
)
if langchain_object.memory.memory_key in input_variables:
return
possible_new_mem_key = get_memory_key(langchain_object)
if possible_new_mem_key is not None:
update_memory_keys(langchain_object, possible_new_mem_key)
async def get_result_and_steps(langchain_object, message: str, **kwargs):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
chat_input = None
memory_key = ""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
memory_key = langchain_object.memory.memory_key
if hasattr(langchain_object, "input_keys"):
for key in langchain_object.input_keys:
if key not in [memory_key, "chat_history"]:
chat_input = {key: message}
else:
chat_input = message # type: ignore
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = True
fix_memory_inputs(langchain_object)
try:
async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)]
output = await langchain_object.acall(chat_input, callbacks=async_callbacks)
except Exception as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
sync_callbacks = [StreamingLLMCallbackHandler(**kwargs)]
output = langchain_object(chat_input, callbacks=sync_callbacks)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
thought = format_actions(intermediate_steps) if intermediate_steps else ""
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought
def get_result_and_thought(langchain_object, message: str):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
chat_input = None
memory_key = ""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
memory_key = langchain_object.memory.memory_key
if hasattr(langchain_object, "input_keys"):
for key in langchain_object.input_keys:
if key not in [memory_key, "chat_history"]:
chat_input = {key: message}
else:
chat_input = message # type: ignore
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = False
fix_memory_inputs(langchain_object)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
try:
# if hasattr(langchain_object, "acall"):
# output = await langchain_object.acall(chat_input)
# else:
output = langchain_object(chat_input)
except ValueError as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
output = langchain_object.run(chat_input)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
if intermediate_steps:
thought = format_actions(intermediate_steps)
else:
thought = output_buffer.getvalue()
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought
def format_actions(actions: List[Tuple[AgentAction, str]]) -> str:
"""Format a list of (AgentAction, answer) tuples into a string."""
output = []
for action, answer in actions:
log = action.log
tool = action.tool
tool_input = action.tool_input
output.append(f"Log: {log}")
if "Action" not in log and "Action Input" not in log:
output.append(f"Tool: {tool}")
output.append(f"Tool Input: {tool_input}")
output.append(f"Answer: {answer}")
output.append("") # Add a blank line
return "\n".join(output)

View file

@ -1,6 +1,7 @@
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Type
from langflow.interface.base import LangChainTypeCreator
from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode
from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
from langflow.settings import settings
from langflow.utils.logger import logger
@ -10,6 +11,10 @@ from langflow.utils.util import build_template_from_class
class TextSplitterCreator(LangChainTypeCreator):
type_name: str = "textsplitters"
@property
def frontend_node_class(self) -> Type[TextSplittersFrontendNode]:
return TextSplittersFrontendNode
@property
def type_to_loader_dict(self) -> Dict:
return textsplitter_type_to_cls_dict
@ -17,43 +22,7 @@ class TextSplitterCreator(LangChainTypeCreator):
def get_signature(self, name: str) -> Optional[Dict]:
"""Get the signature of a text splitter."""
try:
signature = build_template_from_class(name, textsplitter_type_to_cls_dict)
signature["template"]["documents"] = {
"type": "BaseLoader",
"required": True,
"show": True,
"name": "documents",
}
signature["template"]["separator"] = {
"type": "str",
"required": True,
"show": True,
"value": ".",
"name": "separator",
"display_name": "Separator",
}
signature["template"]["chunk_size"] = {
"type": "int",
"required": True,
"show": True,
"value": 1000,
"name": "chunk_size",
"display_name": "Chunk Size",
}
signature["template"]["chunk_overlap"] = {
"type": "int",
"required": True,
"show": True,
"value": 200,
"name": "chunk_overlap",
"display_name": "Chunk Overlap",
}
return signature
return build_template_from_class(name, textsplitter_type_to_cls_dict)
except ValueError as exc:
raise ValueError(f"Text Splitter {name} not found") from exc
except AttributeError as exc:

View file

@ -42,24 +42,27 @@ class ToolkitCreator(LangChainTypeCreator):
def get_signature(self, name: str) -> Optional[Dict]:
try:
return build_template_from_class(name, self.type_to_loader_dict)
template = build_template_from_class(name, self.type_to_loader_dict)
# add Tool to base_classes
if "toolkit" in name.lower() and template:
template["base_classes"].append("Tool")
return template
except ValueError as exc:
raise ValueError("Prompt not found") from exc
raise ValueError("Toolkit not found") from exc
except AttributeError as exc:
logger.error(f"Prompt {name} not loaded: {exc}")
logger.error(f"Toolkit {name} not loaded: {exc}")
return None
def to_list(self) -> List[str]:
return list(self.type_to_loader_dict.keys())
def get_create_function(self, name: str) -> Callable:
if loader_name := self.create_functions.get(name, None):
# import loader
if loader_name := self.create_functions.get(name):
return import_module(
f"from langchain.agents.agent_toolkits import {loader_name[0]}"
)
else:
raise ValueError("Loader not found")
raise ValueError("Toolkit not found")
def has_create_function(self, name: str) -> bool:
# check if the function list is not empty

View file

@ -71,7 +71,8 @@ class ToolCreator(LangChainTypeCreator):
for tool, tool_fcn in ALL_TOOLS_NAMES.items():
tool_params = get_tool_params(tool_fcn)
tool_name = tool_params.get("name", tool)
tool_name = tool_params.get("name") or tool
if tool_name in settings.tools or settings.dev:
if tool_name == "JsonSpec":

View file

@ -9,10 +9,14 @@ from langchain.agents.load_tools import (
from langchain.tools.json.tool import JsonSpec
from langflow.interface.importing.utils import import_class
from langflow.interface.tools.custom import PythonFunction
from langflow.interface.tools.custom import PythonFunctionTool, PythonFunction
FILE_TOOLS = {"JsonSpec": JsonSpec}
CUSTOM_TOOLS = {"Tool": Tool, "PythonFunction": PythonFunction}
CUSTOM_TOOLS = {
"Tool": Tool,
"PythonFunctionTool": PythonFunctionTool,
"PythonFunction": PythonFunction,
}
OTHER_TOOLS = {tool: import_class(f"langchain.tools.{tool}") for tool in tools.__all__}

View file

@ -1,8 +1,10 @@
from typing import Callable, Optional
from langflow.interface.importing.utils import get_function
from pydantic import BaseModel, validator
from langflow.utils import validate
from langchain.agents.tools import Tool
class Function(BaseModel):
@ -31,6 +33,21 @@ class Function(BaseModel):
return validate.create_function(self.code, function_name)
class PythonFunctionTool(Function, Tool):
"""Python function"""
name: str = "Custom Tool"
description: str
code: str
def __init__(self, name: str, description: str, code: str):
self.name = name
self.description = description
self.code = code
self.func = get_function(self.code)
super().__init__(name=name, description=description, func=self.func)
class PythonFunction(Function):
"""Python function"""

View file

@ -1,19 +1,23 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langflow.api.chat import router as chat_router
from langflow.api.endpoints import router as endpoints_router
from langflow.api.validate import router as validate_router
from langflow.api import router
from langflow.database.base import create_db_and_tables
def create_app():
"""Create the FastAPI app and include the router."""
app = FastAPI()
origins = [
"*",
]
@app.get("/health")
def get_health():
return {"status": "OK"}
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
@ -22,9 +26,8 @@ def create_app():
allow_headers=["*"],
)
app.include_router(endpoints_router)
app.include_router(validate_router)
app.include_router(chat_router)
app.include_router(router)
app.on_event("startup")(create_db_and_tables)
return app
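
A minimal run sketch (not part of this diff); the module path and port are assumptions, and the health route defined above should answer once the server is up:

import uvicorn

from langflow.main import create_app  # assumed module path for this factory

app = create_app()

if __name__ == "__main__":
    uvicorn.run(app, host="127.0.0.1", port=7860)  # port choice is arbitrary here
    # GET /health is expected to return {"status": "OK"}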

View file

@ -0,0 +1,55 @@
from langflow.api.v1.callback import (
AsyncStreamingLLMCallbackHandler,
StreamingLLMCallbackHandler,
)
from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.utils.logger import logger
async def get_result_and_steps(langchain_object, message: str, **kwargs):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
chat_input = None
memory_key = ""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
memory_key = langchain_object.memory.memory_key
if hasattr(langchain_object, "input_keys"):
for key in langchain_object.input_keys:
if key not in [memory_key, "chat_history"]:
chat_input = {key: message}
else:
chat_input = message # type: ignore
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = True
fix_memory_inputs(langchain_object)
try:
async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)]
output = await langchain_object.acall(chat_input, callbacks=async_callbacks)
except Exception as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
sync_callbacks = [StreamingLLMCallbackHandler(**kwargs)]
output = langchain_object(chat_input, callbacks=sync_callbacks)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
thought = format_actions(intermediate_steps) if intermediate_steps else ""
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought

View file

@ -1,11 +1,19 @@
import contextlib
import io
from langchain.schema import AgentAction
import json
from langflow.interface.run import (
build_langchain_object_with_caching,
get_memory_key,
update_memory_keys,
)
from langflow.utils.logger import logger
from langflow.graph import Graph
from typing import Any, Dict, List, Tuple
def fix_memory_inputs(langchain_object):
"""
Given a LangChain object, this function checks if it has a memory attribute and if that memory key exists in the
@ -30,6 +38,109 @@ def fix_memory_inputs(langchain_object):
update_memory_keys(langchain_object, possible_new_mem_key)
def format_actions(actions: List[Tuple[AgentAction, str]]) -> str:
"""Format a list of (AgentAction, answer) tuples into a string."""
output = []
for action, answer in actions:
log = action.log
tool = action.tool
tool_input = action.tool_input
output.append(f"Log: {log}")
if "Action" not in log and "Action Input" not in log:
output.append(f"Tool: {tool}")
output.append(f"Tool Input: {tool_input}")
output.append(f"Answer: {answer}")
output.append("") # Add a blank line
return "\n".join(output)
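To illustrate what format_actions produces, a small sketch with a single fabricated intermediate step; AgentAction comes from langchain.schema and the values are placeholders:

from langchain.schema import AgentAction

steps = [
    (
        AgentAction(
            tool="Search",
            tool_input="langflow docs",
            log="I should look this up.",
        ),
        "Found the documentation.",
    )
]

# Prints the Log, Tool, Tool Input and Answer lines for the step.
print(format_actions(steps))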
def get_result_and_thought(langchain_object, message: str):
"""Get result and thought from extracted json"""
try:
if hasattr(langchain_object, "verbose"):
langchain_object.verbose = True
chat_input = None
memory_key = ""
if hasattr(langchain_object, "memory") and langchain_object.memory is not None:
memory_key = langchain_object.memory.memory_key
if hasattr(langchain_object, "input_keys"):
for key in langchain_object.input_keys:
if key not in [memory_key, "chat_history"]:
chat_input = {key: message}
else:
chat_input = message # type: ignore
if hasattr(langchain_object, "return_intermediate_steps"):
# https://github.com/hwchase17/langchain/issues/2068
# Deactivating until we have a frontend solution
# to display intermediate steps
langchain_object.return_intermediate_steps = False
fix_memory_inputs(langchain_object)
with io.StringIO() as output_buffer, contextlib.redirect_stdout(output_buffer):
try:
# if hasattr(langchain_object, "acall"):
# output = await langchain_object.acall(chat_input)
# else:
output = langchain_object(chat_input)
except ValueError as exc:
# make the error message more informative
logger.debug(f"Error: {str(exc)}")
output = langchain_object.run(chat_input)
intermediate_steps = (
output.get("intermediate_steps", []) if isinstance(output, dict) else []
)
result = (
output.get(langchain_object.output_keys[0])
if isinstance(output, dict)
else output
)
if intermediate_steps:
thought = format_actions(intermediate_steps)
else:
thought = output_buffer.getvalue()
except Exception as exc:
raise ValueError(f"Error: {str(exc)}") from exc
return result, thought
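A hedged sketch of calling get_result_and_thought with a trivial chain; FakeListLLM is used here only so the example runs without an API key, and the prompt is illustrative:

from langchain.chains import LLMChain
from langchain.llms.fake import FakeListLLM
from langchain.prompts import PromptTemplate

llm = FakeListLLM(responses=["Paris"])
chain = LLMChain(
    llm=llm,
    prompt=PromptTemplate.from_template("What is the capital of {country}?"),
)

# result holds the chain output; thought holds whatever verbose output was captured.
result, thought = get_result_and_thought(chain, "France")
print(result)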
def load_or_build_langchain_object(data_graph, is_first_message=False):
"""
Load langchain object from cache if it exists, otherwise build it.
"""
if is_first_message:
build_langchain_object_with_caching.clear_cache()
return build_langchain_object_with_caching(data_graph)
def process_graph_cached(data_graph: Dict[str, Any], message: str):
"""
Process graph by extracting input variables and replacing ZeroShotPrompt
with PromptTemplate, then run the graph and return the result and thought.
"""
# Load langchain object
is_first_message = len(data_graph.get("chatHistory", [])) == 0
langchain_object = load_or_build_langchain_object(data_graph, is_first_message)
logger.debug("Loaded langchain object")
if langchain_object is None:
# Raise user facing error
raise ValueError(
"There was an error loading the langchain_object. Please, check all the nodes and try again."
)
# Generate result and thought
logger.debug("Generating result and thought")
result, thought = get_result_and_thought(langchain_object, message)
logger.debug("Generated result and thought")
return {"result": str(result), "thought": thought.strip()}
def load_flow_from_json(path: str, build=True):
"""Load flow from json file"""
# This is done to avoid circular imports
@ -59,3 +170,28 @@ def load_flow_from_json(path: str, build=True):
fix_memory_inputs(langchain_object)
return langchain_object
return graph
def process_tweaks(graph_data: Dict, tweaks: Dict):
"""This function is used to tweak the graph data using the node id and the tweaks dict"""
# the tweaks dict is a dict of dicts
# the key is the node id and the value is a dict of the tweaks
# the dict of tweaks contains the name of a certain parameter and the value to be tweaked
# We need to process the graph data to add the tweaks
if "data" not in graph_data and "nodes" in graph_data:
nodes = graph_data["nodes"]
else:
nodes = graph_data["data"]["nodes"]
for node in nodes:
node_id = node["id"]
if node_id in tweaks:
node_tweaks = tweaks[node_id]
template_data = node["data"]["node"]["template"]
for tweak_name, tweak_value in node_tweaks.items():
if tweak_name in template_data:
template_data[tweak_name]["value"] = tweak_value
print(
f"Something changed in node {node_id} with tweak {tweak_name} and value {tweak_value}"
)
return graph_data
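A quick sketch of the structure process_tweaks expects; the node id and field name are placeholders rather than values from a real flow export:

graph_data = {
    "data": {
        "nodes": [
            {
                "id": "llm-node-1",
                "data": {"node": {"template": {"temperature": {"value": 0.7}}}},
            }
        ]
    }
}

# Override the temperature field of that single node.
tweaks = {"llm-node-1": {"temperature": 0.1}}
graph_data = process_tweaks(graph_data, tweaks)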

View file

@ -20,10 +20,13 @@ class Settings(BaseSettings):
textsplitters: List[str] = []
utilities: List[str] = []
dev: bool = False
database_url: str = "sqlite:///./langflow.db"
remove_api_keys: bool = False
class Config:
validate_assignment = True
extra = "ignore"
env_prefix = "LANGFLOW_"
@root_validator(allow_reuse=True)
def validate_lists(cls, values):
@ -46,6 +49,11 @@ class Settings(BaseSettings):
self.utilities = new_settings.utilities or []
self.dev = dev
def update_settings(self, **kwargs):
for key, value in kwargs.items():
if hasattr(self, key):
setattr(self, key, value)
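A short illustration of the new update_settings hook; the import path and values are assumptions for this sketch, and only attributes that already exist on Settings are changed:

from langflow.settings import settings  # assumed location of the shared Settings instance

settings.update_settings(dev=True, database_url="sqlite:///./test.db", remove_api_keys=True)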
def save_settings_to_yaml(settings: Settings, file_path: str):
with open(file_path, "w") as f:

View file

@ -7,6 +7,8 @@ from langflow.template.frontend_node import (
prompts,
tools,
vectorstores,
documentloaders,
textsplitters,
)
__all__ = [
@ -18,4 +20,6 @@ __all__ = [
"llms",
"prompts",
"vectorstores",
"documentloaders",
"textsplitters",
]

View file

@ -37,7 +37,7 @@ class SQLAgentNode(FrontendNode):
),
],
)
description: str = """Construct a sql agent from an LLM and tools."""
description: str = """Construct an SQL agent from an LLM and tools."""
base_classes: list[str] = ["AgentExecutor"]
def to_dict(self):
@ -146,7 +146,7 @@ class CSVAgentNode(FrontendNode):
),
],
)
description: str = """Construct a json agent from a CSV and tools."""
description: str = """Construct a CSV agent from a CSV and tools."""
base_classes: list[str] = ["AgentExecutor"]
def to_dict(self):
@ -155,8 +155,9 @@ class CSVAgentNode(FrontendNode):
class InitializeAgentNode(FrontendNode):
name: str = "AgentInitializer"
display_name: str = "AgentInitializer"
template: Template = Template(
type_name="AgentInitializer",
type_name="initialize_agent",
fields=[
TemplateField(
field_type="str",
@ -194,7 +195,7 @@ class InitializeAgentNode(FrontendNode):
),
],
)
description: str = """Construct a json agent from an LLM and tools."""
description: str = """Construct a zero shot agent from an LLM and tools."""
base_classes: list[str] = ["AgentExecutor", "function"]
def to_dict(self):

View file

@ -14,6 +14,7 @@ class FrontendNode(BaseModel):
description: str
base_classes: List[str]
name: str = ""
display_name: str = ""
def to_dict(self) -> dict:
return {
@ -21,7 +22,8 @@ class FrontendNode(BaseModel):
"template": self.template.to_dict(self.format_field),
"description": self.description,
"base_classes": self.base_classes,
}
"display_name": self.display_name or self.name,
},
}
def add_extra_fields(self) -> None:
@ -120,14 +122,30 @@ class FrontendNode(BaseModel):
) -> None:
"""Handles specific field values for certain fields."""
if key == "headers":
field.value = """{'Authorization':
'Bearer <token>'}"""
if name == "OpenAI" and key == "model_name":
field.options = constants.OPENAI_MODELS
field.is_list = True
elif name == "ChatOpenAI" and key == "model_name":
field.options = constants.CHAT_OPENAI_MODELS
field.value = """{'Authorization': 'Bearer <token>'}"""
FrontendNode._handle_model_specific_field_values(field, key, name)
FrontendNode._handle_api_key_specific_field_values(field, key, name)
@staticmethod
def _handle_model_specific_field_values(
field: TemplateField, key: str, name: Optional[str] = None
) -> None:
"""Handles specific field values related to models."""
model_dict = {
"OpenAI": constants.OPENAI_MODELS,
"ChatOpenAI": constants.CHAT_OPENAI_MODELS,
"Anthropic": constants.ANTHROPIC_MODELS,
"ChatAnthropic": constants.ANTHROPIC_MODELS,
}
if name in model_dict and key == "model_name":
field.options = model_dict[name]
field.is_list = True
@staticmethod
def _handle_api_key_specific_field_values(
field: TemplateField, key: str, name: Optional[str] = None
) -> None:
"""Handles specific field values related to API keys."""
if "api_key" in key and "OpenAI" in str(name):
field.display_name = "OpenAI API Key"
field.required = False

View file

@ -33,6 +33,14 @@ class ChainFrontendNode(FrontendNode):
field.show = True
field.advanced = True
# We should think of a way to deal with this later
# if field.field_type == "PromptTemplate":
# field.field_type = "str"
# field.multiline = True
# field.show = True
# field.advanced = False
# field.value = field.value.template
# Separated for possible future changes
if field.name == "prompt" and field.value is None:
field.required = True
@ -126,7 +134,7 @@ class TimeTravelGuideChainNode(FrontendNode):
),
],
)
description: str = "Time travel guide chain to be used in the flow."
description: str = "Time travel guide chain."
base_classes: list[str] = [
"LLMChain",
"BaseCustomChain",
@ -197,7 +205,7 @@ class CombineDocsChainNode(FrontendNode):
),
],
)
description: str = """Construct a zero shot agent from an LLM and tools."""
description: str = """Load question answering chain."""
base_classes: list[str] = ["BaseCombineDocumentsChain", "function"]
def to_dict(self):

View file

@ -0,0 +1,79 @@
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
def build_template(
suffixes: list, fileTypes: list, name: str = "file_path"
) -> TemplateField:
"""Build a template field for a document loader."""
return TemplateField(
field_type="file",
required=True,
show=True,
name=name,
value="",
suffixes=suffixes,
fileTypes=fileTypes,
)
class DocumentLoaderFrontNode(FrontendNode):
file_path_templates = {
"AirbyteJSONLoader": build_template(suffixes=[".json"], fileTypes=["json"]),
"CoNLLULoader": build_template(suffixes=[".csv"], fileTypes=["csv"]),
"CSVLoader": build_template(suffixes=[".csv"], fileTypes=["csv"]),
"UnstructuredEmailLoader": build_template(suffixes=[".eml"], fileTypes=["eml"]),
"EverNoteLoader": build_template(suffixes=[".xml"], fileTypes=["xml"]),
"FacebookChatLoader": build_template(suffixes=[".json"], fileTypes=["json"]),
"GutenbergLoader": build_template(suffixes=[".txt"], fileTypes=["txt"]),
"BSHTMLLoader": build_template(suffixes=[".html"], fileTypes=["html"]),
"UnstructuredHTMLLoader": build_template(
suffixes=[".html"], fileTypes=["html"]
),
"UnstructuredImageLoader": build_template(
suffixes=[".jpg", ".jpeg", ".png", ".gif", ".bmp"],
fileTypes=["jpg", "jpeg", "png", "gif", "bmp"],
),
"UnstructuredMarkdownLoader": build_template(
suffixes=[".md"], fileTypes=["md"]
),
"PyPDFLoader": build_template(suffixes=[".pdf"], fileTypes=["pdf"]),
"UnstructuredPowerPointLoader": build_template(
suffixes=[".pptx", ".ppt"], fileTypes=["pptx", "ppt"]
),
"SRTLoader": build_template(suffixes=[".srt"], fileTypes=["srt"]),
"TelegramChatLoader": build_template(suffixes=[".json"], fileTypes=["json"]),
"TextLoader": build_template(suffixes=[".txt"], fileTypes=["txt"]),
"UnstructuredWordDocumentLoader": build_template(
suffixes=[".docx", ".doc"], fileTypes=["docx", "doc"]
),
}
def add_extra_fields(self) -> None:
name = None
if self.template.type_name in self.file_path_templates:
self.template.add_field(self.file_path_templates[self.template.type_name])
elif self.template.type_name in {
"WebBaseLoader",
"AZLyricsLoader",
"CollegeConfidentialLoader",
"HNLoader",
"IFixitLoader",
"IMSDbLoader",
}:
name = "web_path"
elif self.template.type_name in {"GitbookLoader"}:
name = "web_page"
elif self.template.type_name in {"ReadTheDocsLoader"}:
name = "path"
if name:
self.template.add_field(
TemplateField(
field_type="str",
required=True,
show=True,
name=name,
value="",
display_name="Web Page",
)
)

View file

@ -12,17 +12,44 @@ class LLMFrontendNode(FrontendNode):
field.name.title().replace("Openai", "OpenAI").replace("_", " ")
).replace("Api", "API")
if "key" not in field.name.lower() and "token" not in field.name.lower():
field.password = False
@staticmethod
def format_azure_field(field: TemplateField):
if field.name == "model_name":
field.show = False # Azure uses deployment_name instead of model_name.
elif field.name == "openai_api_type":
field.show = False
field.password = False
field.value = "azure"
elif field.name == "openai_api_version":
field.password = False
@staticmethod
def format_llama_field(field: TemplateField):
field.show = True
field.advanced = not field.required
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
display_names_dict = {
"huggingfacehub_api_token": "HuggingFace Hub API Token",
}
FrontendNode.format_field(field, name)
LLMFrontendNode.format_openai_field(field)
if name and "azure" in name.lower():
LLMFrontendNode.format_azure_field(field)
if name and "llama" in name.lower():
LLMFrontendNode.format_llama_field(field)
SHOW_FIELDS = ["repo_id"]
if field.name in SHOW_FIELDS:
field.show = True
if "api" in field.name and ("key" in field.name or "token" in field.name):
if "api" in field.name and (
"key" in field.name
or ("token" in field.name and "tokens" not in field.name)
):
field.password = True
field.show = True
# Required should be False to support
@ -44,8 +71,12 @@ class LLMFrontendNode(FrontendNode):
field.field_type = "code"
field.advanced = True
field.show = True
elif field.name in ["model_name", "temperature", "model_file", "model_type"]:
elif field.name in [
"model_name",
"temperature",
"model_file",
"model_type",
"deployment_name",
]:
field.advanced = False
field.show = True
LLMFrontendNode.format_openai_field(field)

View file

@ -5,6 +5,20 @@ from langflow.template.frontend_node.base import FrontendNode
class MemoryFrontendNode(FrontendNode):
#! Needs testing
def add_extra_fields(self) -> None:
# add return_messages field
self.template.add_field(
TemplateField(
field_type="bool",
required=False,
show=True,
name="return_messages",
advanced=False,
value=False,
)
)
@staticmethod
def format_field(field: TemplateField, name: Optional[str] = None) -> None:
FrontendNode.format_field(field, name)
@ -18,3 +32,7 @@ class MemoryFrontendNode(FrontendNode):
field.value = 10
field.display_name = "Memory Size"
field.password = False
if field.name == "return_messages":
field.required = False
field.show = True
field.advanced = False

View file

@ -74,7 +74,7 @@ class BasePromptFrontendNode(FrontendNode):
class ZeroShotPromptNode(BasePromptFrontendNode):
name: str = "ZeroShotPrompt"
template: Template = Template(
type_name="zero_shot",
type_name="ZeroShotPrompt",
fields=[
TemplateField(
field_type="str",

View file

@ -0,0 +1,49 @@
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
class TextSplittersFrontendNode(FrontendNode):
def add_extra_fields(self) -> None:
self.template.add_field(
TemplateField(
field_type="BaseLoader",
required=True,
show=True,
name="documents",
)
)
name = "separator"
if self.template.type_name == "CharacterTextSplitter":
name = "separator"
elif self.template.type_name == "RecursiveCharacterTextSplitter":
name = "separators"
self.template.add_field(
TemplateField(
field_type="str",
required=True,
show=True,
value=".",
name=name,
display_name="Separator",
)
)
self.template.add_field(
TemplateField(
field_type="int",
required=True,
show=True,
value=1000,
name="chunk_size",
display_name="Chunk Size",
)
)
self.template.add_field(
TemplateField(
field_type="int",
required=True,
show=True,
value=200,
name="chunk_overlap",
display_name="Chunk Overlap",
)
)

View file

@ -52,7 +52,53 @@ class ToolNode(FrontendNode):
),
],
)
description: str = "Tool to be used in the flow."
description: str = "Converts a chain, agent or function into a tool."
base_classes: list[str] = ["Tool"]
def to_dict(self):
return super().to_dict()
class PythonFunctionToolNode(FrontendNode):
name: str = "PythonFunctionTool"
template: Template = Template(
type_name="PythonFunctionTool",
fields=[
TemplateField(
field_type="str",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=False,
value="",
name="name",
advanced=False,
),
TemplateField(
field_type="str",
required=True,
placeholder="",
is_list=False,
show=True,
multiline=False,
value="",
name="description",
advanced=False,
),
TemplateField(
field_type="code",
required=True,
placeholder="",
is_list=False,
show=True,
value=DEFAULT_PYTHON_FUNCTION,
name="code",
advanced=False,
),
],
)
description: str = "Python function to be executed."
base_classes: list[str] = ["Tool"]
def to_dict(self):
@ -62,7 +108,7 @@ class ToolNode(FrontendNode):
class PythonFunctionNode(FrontendNode):
name: str = "PythonFunction"
template: Template = Template(
type_name="python_function",
type_name="PythonFunction",
fields=[
TemplateField(
field_type="code",

View file

@ -5,8 +5,31 @@ OPENAI_MODELS = [
"text-babbage-001",
"text-ada-001",
]
CHAT_OPENAI_MODELS = ["gpt-3.5-turbo", "gpt-4", "gpt-4-32k"]
CHAT_OPENAI_MODELS = [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-16k",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4",
"gpt-4-32k",
]
ANTHROPIC_MODELS = [
"claude-v1", # largest model, ideal for a wide range of more complex tasks.
"claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window.
"claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec!
"claude-instant-v1-100k", # Like claude-instant-v1 with a 100,000 token context window but retains its performance.
# Specific sub-versions of the above models:
"claude-v1.3", # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing.
"claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window.
"claude-v1.2", # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks.
"claude-v1.0", # An earlier version of claude-v1.
"claude-instant-v1.1", # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks.
"claude-instant-v1.1-100k", # Version of claude-instant-v1.1 with a 100K token context window.
"claude-instant-v1.0", # An earlier version of claude-instant-v1.
]
DEFAULT_PYTHON_FUNCTION = """
def python_function(text: str) -> str:

View file

@ -302,7 +302,9 @@ def format_dict(d, name: Optional[str] = None):
elif name == "ChatOpenAI" and key == "model_name":
value["options"] = constants.CHAT_OPENAI_MODELS
value["list"] = True
elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name":
value["options"] = constants.ANTHROPIC_MODELS
value["list"] = True
return d

View file

@ -1,4 +1,5 @@
import ast
import contextlib
import importlib
import types
from typing import Dict
@ -147,11 +148,8 @@ def create_function(code, function_name):
code_obj = compile(
ast.Module(body=[function_code], type_ignores=[]), "<string>", "exec"
)
try:
with contextlib.suppress(Exception):
exec(code_obj, exec_globals, locals())
except Exception:
pass
exec_globals[function_name] = locals()[function_name]
# Return a function that imports necessary modules and calls the target function

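For context, a hedged sketch of how create_function is used: it compiles the source and returns a callable wrapper for the named function. The function body here is illustrative:

from langflow.utils import validate

code = """
def add_one(x: int) -> int:
    return x + 1
"""

add_one = validate.create_function(code, "add_one")
print(add_one(41))  # 42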
View file

@ -0,0 +1 @@
/usr/lib/node_modules/opencommit/out/cli.cjs

File diff suppressed because it is too large

View file

@ -1,83 +1,106 @@
{
"name": "langflow",
"version": "0.1.2",
"private": true,
"dependencies": {
"@emotion/react": "^11.10.5",
"@emotion/styled": "^11.10.5",
"@headlessui/react": "^1.7.10",
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@tabler/icons-react": "^2.18.0",
"@tailwindcss/forms": "^0.5.3",
"@tailwindcss/line-clamp": "^0.4.4",
"ace-builds": "^1.16.0",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"base64-js": "^1.5.1",
"lodash": "^4.17.21",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
"react-icons": "^4.8.0",
"react-laag": "^2.0.5",
"react-markdown": "^8.0.7",
"react-router-dom": "^6.8.1",
"react-syntax-highlighter": "^15.5.0",
"react-tabs": "^6.0.0",
"react-tooltip": "^5.13.1",
"reactflow": "^11.5.5",
"rehype-mathjax": "^4.0.2",
"remark-gfm": "^3.0.1",
"remark-math": "^5.1.1",
"uuid": "^9.0.0",
"vite-plugin-svgr": "^3.2.0",
"web-vitals": "^2.1.4"
},
"scripts": {
"dev:docker": "vite --host 0.0.0.0",
"start": "vite",
"build": "vite build",
"serve": "vite preview",
"format": "npx prettier --write \"src/**/*.{js,jsx,ts,tsx,json,md}\""
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
},
"proxy": "http://127.0.0.1:7860",
"devDependencies": {
"@tailwindcss/typography": "^0.5.9",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"@types/jest": "^27.5.2",
"@types/lodash": "^4.14.194",
"@types/node": "^16.18.12",
"@types/react": "^18.0.28",
"@types/react-dom": "^18.0.11",
"@types/uuid": "^9.0.1",
"@vitejs/plugin-react-swc": "^3.0.0",
"autoprefixer": "^10.4.14",
"postcss": "^8.4.23",
"tailwindcss": "^3.3.2",
"typescript": "^5.0.2",
"vite": "^4.3.5"
}
"name": "langflow",
"version": "0.1.2",
"private": true,
"dependencies": {
"@emotion/react": "^11.10.5",
"@emotion/styled": "^11.10.5",
"@headlessui/react": "^1.7.10",
"@heroicons/react": "^2.0.15",
"@mui/material": "^5.11.9",
"@radix-ui/react-dropdown-menu": "^2.0.5",
"@radix-ui/react-menubar": "^1.0.3",
"@radix-ui/react-separator": "^1.0.3",
"@radix-ui/react-slot": "^1.0.2",
"@radix-ui/react-tabs": "^1.0.4",
"@radix-ui/react-checkbox": "^1.0.4",
"@radix-ui/react-dialog": "^1.0.4",
"@radix-ui/react-label": "^2.0.2",
"@radix-ui/react-switch": "^1.0.3",
"@radix-ui/react-tooltip": "^1.0.6",
"@tabler/icons-react": "^2.18.0",
"@tailwindcss/forms": "^0.5.3",
"@tailwindcss/line-clamp": "^0.4.4",
"ace-builds": "^1.16.0",
"add": "^2.0.6",
"ansi-to-html": "^0.7.2",
"axios": "^1.3.2",
"base64-js": "^1.5.1",
"class-variance-authority": "^0.6.0",
"clsx": "^1.2.1",
"esbuild": "^0.17.18",
"lodash": "^4.17.21",
"lucide-react": "^0.233.0",
"react": "^18.2.0",
"react-ace": "^10.1.0",
"react-cookie": "^4.1.1",
"react-dom": "^18.2.0",
"react-error-boundary": "^4.0.2",
"react-icons": "^4.8.0",
"react-laag": "^2.0.5",
"react-markdown": "^8.0.7",
"react-router-dom": "^6.8.1",
"react-syntax-highlighter": "^15.5.0",
"react-tabs": "^6.0.0",
"react-tooltip": "^5.13.1",
"reactflow": "^11.5.5",
"rehype-mathjax": "^4.0.2",
"remark-gfm": "^3.0.1",
"remark-math": "^5.1.1",
"shadcn-ui": "^0.1.3",
"short-unique-id": "^4.4.4",
"switch": "^0.0.0",
"table": "^6.8.1",
"tailwind-merge": "^1.13.0",
"tailwindcss-animate": "^1.0.5",
"uuid": "^9.0.0",
"vite-plugin-svgr": "^3.2.0",
"web-vitals": "^2.1.4"
},
"scripts": {
"dev:docker": "vite --host 0.0.0.0",
"start": "vite",
"build": "vite build",
"serve": "vite preview",
"format": "npx prettier --write \"**/*.{js,jsx,ts,tsx,json,md}\""
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
},
"proxy": "http://127.0.0.1:7860",
"devDependencies": {
"@swc/cli": "^0.1.62",
"@swc/core": "^1.3.62",
"@tailwindcss/typography": "^0.5.9",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"@types/jest": "^27.5.2",
"@types/lodash": "^4.14.194",
"@types/node": "^16.18.12",
"@types/react": "^18.0.28",
"@types/react-dom": "^18.0.11",
"@types/uuid": "^9.0.1",
"@vitejs/plugin-react-swc": "^3.0.0",
"autoprefixer": "^10.4.14",
"postcss": "^8.4.23",
"tailwindcss": "^3.3.2",
"typescript": "^5.0.2",
"vite": "^4.3.5"
}
}

View file

@ -3,4 +3,4 @@ module.exports = {
tailwindcss: {},
autoprefixer: {},
},
}
};

View file

@ -7,13 +7,14 @@ import _ from "lodash";
import ErrorAlert from "./alerts/error";
import NoticeAlert from "./alerts/notice";
import SuccessAlert from "./alerts/success";
import ExtraSidebar from "./components/ExtraSidebarComponent";
import { alertContext } from "./contexts/alertContext";
import { locationContext } from "./contexts/locationContext";
import TabsManagerComponent from "./pages/FlowPage/components/tabsManagerComponent";
import { ErrorBoundary } from "react-error-boundary";
import CrashErrorComponent from "./components/CrashErrorComponent";
import { TabsContext } from "./contexts/tabsContext";
import { getVersion } from "./controllers/API";
import Router from "./routes";
import Header from "./components/headerComponent";
export default function App() {
let { setCurrent, setShowSideBar, setIsStackedOpen } =
@ -46,15 +47,6 @@ export default function App() {
}>
>([]);
// Initialize state variable for the version
const [version, setVersion] = useState("");
useEffect(() => {
fetch("/version")
.then((res) => res.json())
.then((data) => {
setVersion(data.version);
});
}, []);
// Use effect hook to update alertsList when a new alert is added
useEffect(() => {
// If there is an error alert open with data, add it to the alertsList
@ -112,7 +104,6 @@ export default function App() {
return (
//need parent component with width and height
<div className="h-full flex flex-col">
<div className="flex grow-0 shrink basis-auto"></div>
<ErrorBoundary
onReset={() => {
window.localStorage.removeItem("tabsData");
@ -122,19 +113,14 @@ export default function App() {
}}
FallbackComponent={CrashErrorComponent}
>
<div className="flex grow shrink basis-auto min-h-0 flex-1 overflow-hidden">
<ExtraSidebar />
{/* Main area */}
<main className="min-w-0 flex-1 border-t border-gray-200 dark:border-gray-700 flex">
{/* Primary column */}
<div className="w-full h-full">
<TabsManagerComponent></TabsManagerComponent>
</div>
</main>
</div>
<Header />
<Router />
</ErrorBoundary>
<div></div>
<div className="flex z-40 flex-col-reverse fixed bottom-5 left-5">
<div
className="flex flex-col-reverse fixed bottom-5 left-5"
style={{ zIndex: 999 }}
>
{alertsList.map((alert) => (
<div key={alert.id}>
{alert.type === "error" ? (
@ -164,14 +150,6 @@ export default function App() {
</div>
))}
</div>
<a
target={"_blank"}
href="https://logspace.ai/"
className="absolute left-7 bottom-2 flex h-6 cursor-pointer flex-col items-center justify-start overflow-hidden rounded-lg bg-gray-800 px-2 text-center font-sans text-xs tracking-wide text-gray-300 transition-all duration-500 ease-in-out hover:h-12 dark:bg-gray-100 dark:text-gray-800"
>
{version && <div className="mt-1"> LangFlow v{version}</div>}
<div className="mt-2">Created by Logspace</div>
</a>
</div>
);
}

View file

@ -1,9 +1,11 @@
import { Handle, Position, useUpdateNodeInternals } from "reactflow";
import Tooltip from "../../../../components/TooltipComponent";
import { classNames, isValidConnection } from "../../../../utils";
import {
classNames,
groupByFamily,
isValidConnection,
} from "../../../../utils";
import { useContext, useEffect, useRef, useState } from "react";
import InputComponent from "../../../../components/inputComponent";
import ToggleComponent from "../../../../components/toggleComponent";
import InputListComponent from "../../../../components/inputListComponent";
import TextAreaComponent from "../../../../components/textAreaComponent";
import { typesContext } from "../../../../contexts/typesContext";
@ -15,6 +17,12 @@ import InputFileComponent from "../../../../components/inputFileComponent";
import { TabsContext } from "../../../../contexts/tabsContext";
import IntComponent from "../../../../components/intComponent";
import PromptAreaComponent from "../../../../components/promptComponent";
import { nodeNames, nodeIcons } from "../../../../utils";
import React from "react";
import { nodeColors } from "../../../../utils";
import ShadTooltip from "../../../../components/ShadTooltipComponent";
import { PopUpContext } from "../../../../contexts/popUpContext";
import ToggleShadComponent from "../../../../components/toggleShadComponent";
export default function ParameterComponent({
left,
@ -28,14 +36,18 @@ export default function ParameterComponent({
required = false,
}: ParameterComponentType) {
const ref = useRef(null);
const refHtml = useRef(null);
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
const { closePopUp } = useContext(PopUpContext);
const { setTabsState, tabId } = useContext(TabsContext);
useEffect(() => {
if (ref.current && ref.current.offsetTop && ref.current.clientHeight) {
setPosition(ref.current.offsetTop + ref.current.clientHeight / 2);
updateNodeInternals(data.id);
}
}, [data.id, ref, updateNodeInternals]);
}, [data.id, ref, ref.current, ref.current?.offsetTop, updateNodeInternals]);
useEffect(() => {
updateNodeInternals(data.id);
@ -44,15 +56,72 @@ export default function ParameterComponent({
const [enabled, setEnabled] = useState(
data.node.template[name]?.value ?? false
);
useEffect(() => {}, [closePopUp, data.node.template]);
const { reactFlowInstance } = useContext(typesContext);
let disabled =
reactFlowInstance?.getEdges().some((e) => e.targetHandle === id) ?? false;
const { save } = useContext(TabsContext);
const [myData, setMyData] = useState(useContext(typesContext).data);
const handleOnNewValue = (newValue: any) => {
data.node.template[name].value = newValue;
// Set state to pending
setTabsState((prev) => {
return {
...prev,
[tabId]: {
isPending: true,
},
};
});
};
useEffect(() => {
const groupedObj = groupByFamily(myData, tooltipTitle);
refHtml.current = groupedObj.map((item, i) => (
<span
key={i}
className={classNames(
i > 0 ? "items-center flex mt-3" : "items-center flex"
)}
>
<div
className="h-5 w-5"
style={{
color: nodeColors[item.family],
}}
>
{React.createElement(nodeIcons[item.family])}
</div>
<span className="ps-2 text-gray-950">
{nodeNames[item.family] ?? ""}{" "}
<span className={classNames(left ? "hidden" : "")}>
{" "}
-&nbsp;
{item.type.split(", ").length > 2
? item.type.split(", ").map((el, i) => (
<>
<span key={i}>
{i == item.type.split(", ").length - 1
? el
: (el += `, `)}
</span>
{i % 2 == 0 && i > 0 && <br></br>}
</>
))
: item.type}
</span>
</span>
</span>
));
}, [tooltipTitle]);
return (
<div
ref={ref}
className="w-full flex flex-wrap justify-between items-center bg-gray-50 dark:bg-gray-800 dark:text-white mt-1 px-5 py-2"
className="w-full flex flex-wrap justify-between items-center bg-muted dark:bg-gray-800 dark:text-white mt-1 px-5 py-2"
>
<>
<div className={"text-sm truncate w-full " + (left ? "" : "text-end")}>
@ -69,7 +138,12 @@ export default function ParameterComponent({
type === "int") ? (
<></>
) : (
<Tooltip title={tooltipTitle + (required ? " (required)" : "")}>
<ShadTooltip
delayDuration={0}
content={refHtml.current}
side={left ? "left" : "right"}
open={refHtml?.current?.length > 0}
>
<Handle
type={left ? "target" : "source"}
position={left ? Position.Left : Position.Right}
@ -86,7 +160,7 @@ export default function ParameterComponent({
top: position,
}}
></Handle>
</Tooltip>
</ShadTooltip>
)}
{left === true &&
@ -102,19 +176,13 @@ export default function ParameterComponent({
? [""]
: data.node.template[name].value
}
onChange={(t: string[]) => {
data.node.template[name].value = t;
save();
}}
onChange={handleOnNewValue}
/>
) : data.node.template[name].multiline ? (
<TextAreaComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
onChange={handleOnNewValue}
/>
) : (
<InputComponent
@ -122,84 +190,72 @@ export default function ParameterComponent({
disableCopyPaste={true}
password={data.node.template[name].password ?? false}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
onChange={handleOnNewValue}
/>
)}
</div>
) : left === true && type === "bool" ? (
<div className="mt-2">
<ToggleComponent
<ToggleShadComponent
disabled={disabled}
enabled={enabled}
setEnabled={(t) => {
data.node.template[name].value = t;
handleOnNewValue(t);
setEnabled(t);
save();
}}
size="large"
/>
</div>
) : left === true && type === "float" ? (
<FloatComponent
disabled={disabled}
disableCopyPaste={true}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
<div className="mt-2 w-full">
<FloatComponent
disabled={disabled}
disableCopyPaste={true}
value={data.node.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
</div>
) : left === true &&
type === "str" &&
data.node.template[name].options ? (
<Dropdown
options={data.node.template[name].options}
onSelect={(newValue) => (data.node.template[name].value = newValue)}
value={data.node.template[name].value ?? "Choose an option"}
></Dropdown>
<div className="w-full">
<Dropdown
options={data.node.template[name].options}
onSelect={handleOnNewValue}
value={data.node.template[name].value ?? "Choose an option"}
></Dropdown>
</div>
) : left === true && type === "code" ? (
<CodeAreaComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
onChange={handleOnNewValue}
/>
) : left === true && type === "file" ? (
<InputFileComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
}}
onChange={handleOnNewValue}
fileTypes={data.node.template[name].fileTypes}
suffixes={data.node.template[name].suffixes}
onFileChange={(t: string) => {
data.node.template[name].content = t;
save();
}}
></InputFileComponent>
) : left === true && type === "int" ? (
<IntComponent
disabled={disabled}
disableCopyPaste={true}
value={data.node.template[name].value ?? ""}
onChange={(t) => {
data.node.template[name].value = t;
save();
}}
/>
<div className="mt-2 w-full">
<IntComponent
disabled={disabled}
disableCopyPaste={true}
value={data.node.template[name].value ?? ""}
onChange={handleOnNewValue}
/>
</div>
) : left === true && type === "prompt" ? (
<PromptAreaComponent
disabled={disabled}
value={data.node.template[name].value ?? ""}
onChange={(t: string) => {
data.node.template[name].value = t;
save();
}}
onChange={handleOnNewValue}
/>
) : (
<></>

View file

@ -1,35 +1,18 @@
import {
BugAntIcon,
Cog6ToothIcon,
InformationCircleIcon,
TrashIcon,
} from "@heroicons/react/24/outline";
import {
CheckCircleIcon,
EllipsisHorizontalCircleIcon,
ExclamationCircleIcon,
} from "@heroicons/react/24/solid";
import {
classNames,
nodeColors,
nodeIcons,
toNormalCase,
toTitleCase,
} from "../../utils";
import { classNames, nodeColors, nodeIcons, toTitleCase } from "../../utils";
import ParameterComponent from "./components/parameterComponent";
import { typesContext } from "../../contexts/typesContext";
import { useContext, useState, useEffect, useRef, Fragment } from "react";
import { useContext, useState, useEffect, useRef } from "react";
import { NodeDataType } from "../../types/flow";
import { alertContext } from "../../contexts/alertContext";
import { PopUpContext } from "../../contexts/popUpContext";
import NodeModal from "../../modals/NodeModal";
import { useCallback } from "react";
import { TabsContext } from "../../contexts/tabsContext";
import { debounce } from "../../utils";
import TooltipReact from "../../components/ReactTooltipComponent";
import Tooltip from "../../components/TooltipComponent";
import { NodeToolbar } from "reactflow";
import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarComponent";
import ShadTooltip from "../../components/ShadTooltipComponent";
import { useSSE } from "../../contexts/SSEContext";
export default function GenericNode({
data,
selected,
@ -40,50 +23,30 @@ export default function GenericNode({
const { setErrorData } = useContext(alertContext);
const showError = useRef(true);
const { types, deleteNode } = useContext(typesContext);
const { openPopUp } = useContext(PopUpContext);
const { closePopUp, openPopUp } = useContext(PopUpContext);
const Icon = nodeIcons[data.type] || nodeIcons[types[data.type]];
const [validationStatus, setValidationStatus] = useState(null);
// State for outline color
const [isValid, setIsValid] = useState(false);
const { save } = useContext(TabsContext);
const { reactFlowInstance } = useContext(typesContext);
const [params, setParams] = useState([]);
const { sseData, isBuilding } = useSSE();
// useEffect(() => {
// if (reactFlowInstance) {
// setParams(Object.values(reactFlowInstance.toObject()));
// }
// }, [save]);
// New useEffect to watch for changes in sseData and update validation status
useEffect(() => {
if (reactFlowInstance) {
setParams(Object.values(reactFlowInstance.toObject()));
const relevantData = sseData[data.id];
if (relevantData) {
// Extract validation information from relevantData and update the validationStatus state
setValidationStatus(relevantData);
} else {
setValidationStatus(null);
}
}, [save]);
const validateNode = useCallback(
debounce(async () => {
try {
const response = await fetch(`/validate/node/${data.id}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(reactFlowInstance.toObject()),
});
if (response.status === 200) {
let jsonResponse = await response.json();
let jsonResponseParsed = await JSON.parse(jsonResponse);
setValidationStatus(jsonResponseParsed);
}
} catch (error) {
// console.error("Error validating node:", error);
setValidationStatus("error");
}
}, 1000), // Adjust the debounce delay (500ms) as needed
[reactFlowInstance, data.id]
);
useEffect(() => {
if (params.length > 0) {
validateNode();
}
}, [params, validateNode]);
}, [sseData, data.id]);
if (!Icon) {
if (showError.current) {
@ -97,199 +60,174 @@ export default function GenericNode({
deleteNode(data.id);
return;
}
console.log(data);
useEffect(() => {}, [closePopUp, data.node.template]);
return (
<div
className={classNames(
selected ? "border border-blue-500" : "border dark:border-gray-700",
"prompt-node relative flex w-96 flex-col justify-center rounded-lg bg-white dark:bg-gray-900"
)}
>
<div className="flex w-full items-center justify-between gap-8 rounded-t-lg border-b bg-gray-50 p-4 dark:border-b-gray-700 dark:bg-gray-800 dark:text-white ">
<div className="flex w-full items-center gap-2 truncate text-lg">
<Icon
className="h-10 w-10 rounded p-1"
style={{
color: nodeColors[types[data.type]] ?? nodeColors.unknown,
}}
/>
<div className="ml-2 truncate">
<TooltipReact
delayShow={1000}
selector={`node-selector-${data.type}`}
htmlContent={data.type}
position="top"
>
<div className="ml-2 truncate">{data.type}</div>
</TooltipReact>
</div>
</div>
<div className="flex gap-3">
<button
className="relative"
onClick={(event) => {
event.preventDefault();
openPopUp(<NodeModal data={data} />);
}}
>
<div className=" absolute -right-1 -top-2 text-red-600">
{Object.keys(data.node.template).some(
(t) =>
data.node.template[t].advanced &&
data.node.template[t].required
)
? " *"
: ""}
<>
<NodeToolbar>
<NodeToolbarComponent
data={data}
openPopUp={openPopUp}
deleteNode={deleteNode}
></NodeToolbarComponent>
</NodeToolbar>
<div
className={classNames(
selected ? "border border-ring" : "border dark:border-gray-700",
"prompt-node relative flex w-96 flex-col justify-center rounded-lg bg-white dark:bg-gray-900"
)}
>
<div className="flex w-full items-center justify-between gap-8 rounded-t-lg border-b bg-muted p-4 dark:border-b-gray-700 dark:bg-gray-800 dark:text-white ">
<div className="flex w-full items-center gap-2 truncate text-lg">
<Icon
className="h-10 w-10 rounded p-1"
style={{
color: nodeColors[types[data.type]] ?? nodeColors.unknown,
}}
/>
<div className="ml-2 truncate">
<ShadTooltip delayDuration={1500} content={data.type}>
<div className="ml-2 truncate text-gray-800">{data.type}</div>
</ShadTooltip>
</div>
<Cog6ToothIcon
</div>
<div className="flex gap-3">
<button
className="relative"
onClick={(event) => {
event.preventDefault();
openPopUp(<NodeModal data={data} />);
}}
></button>
</div>
<div className="flex gap-3">
<div>
<Tooltip
title={
!validationStatus ? (
"Validating..."
) : (
<div className="max-h-96 overflow-auto">
{(validationStatus.params || "")
.split("\n")
.map((line, index) => <div key={index}>{line}</div>)}
</div>
)
}
>
<div className="w-5 h-5 relative top-[3px]">
<div
className={classNames(
validationStatus && validationStatus.valid
? "w-4 h-4 rounded-full bg-green-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
<div
className={classNames(
validationStatus && !validationStatus.valid
? "w-4 h-4 rounded-full bg-red-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
<div
className={classNames(
!validationStatus || isBuilding
? "w-4 h-4 rounded-full bg-yellow-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
</div>
</Tooltip>
</div>
</div>
</div>
<div className="h-full w-full py-5 text-gray-800">
<div className="w-full px-5 pb-3 text-sm text-muted-foreground">
{data.node.description}
</div>
<>
{Object.keys(data.node.template)
.filter((t) => t.charAt(0) !== "_")
.map((t: string, idx) => (
<div key={idx}>
{/* {idx === 0 ? (
<div
className={classNames(
"px-5 py-2 mt-2 dark:text-white text-center",
Object.keys(data.node.template).filter(
(key) =>
!key.startsWith("_") &&
data.node.template[key].show &&
!data.node.template[key].advanced
).length === 0
? "hidden"
: ""
)}
>
Inputs
</div>
) : (
<></>
)} */}
{data.node.template[t].show &&
!data.node.template[t].advanced ? (
<ParameterComponent
data={data}
color={
nodeColors[types[data.node.template[t].type]] ??
nodeColors.unknown
}
title={
data.node.template[t].display_name
? data.node.template[t].display_name
: data.node.template[t].name
? toTitleCase(data.node.template[t].name)
: toTitleCase(t)
}
name={t}
tooltipTitle={data.node.template[t].type}
required={data.node.template[t].required}
id={data.node.template[t].type + "|" + t + "|" + data.id}
left={true}
type={data.node.template[t].type}
/>
) : (
<></>
)}
</div>
))}
<div
className={classNames(
Object.keys(data.node.template).some(
(t) =>
data.node.template[t].advanced && data.node.template[t].show
)
? ""
: "hidden",
"w-5 h-5 dark:text-gray-300"
Object.keys(data.node.template).length < 1 ? "hidden" : "",
"flex w-full justify-center"
)}
></Cog6ToothIcon>
</button>
<button
onClick={() => {
deleteNode(data.id);
}}
>
<TrashIcon className="w-5 h-5 dark:text-gray-300"></TrashIcon>
</button>
<div>
<Tooltip
title={
!validationStatus ? (
"Validating..."
) : (
<div className="max-h-96 overflow-auto">
{validationStatus.params.split("\n").map((line, index) => (
<div key={index}>{line}</div>
))}
</div>
)
}
>
<div className="w-5 h-5 relative top-[3px]">
<div
className={classNames(
validationStatus && validationStatus.valid
? "w-4 h-4 rounded-full bg-green-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
<div
className={classNames(
validationStatus && !validationStatus.valid
? "w-4 h-4 rounded-full bg-red-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
<div
className={classNames(
!validationStatus
? "w-4 h-4 rounded-full bg-yellow-500 opacity-100"
: "w-4 h-4 rounded-full bg-gray-500 opacity-0 hidden animate-spin",
"absolute w-4 hover:text-gray-500 hover:dark:text-gray-300 transition-all ease-in-out duration-200"
)}
></div>
</div>
</Tooltip>
</div>
{" "}
</div>
{/* <div className="px-5 py-2 mt-2 dark:text-white text-center">
Output
</div> */}
<ParameterComponent
data={data}
color={nodeColors[types[data.type]] ?? nodeColors.unknown}
title={data.type}
tooltipTitle={`${data.node.base_classes.join("\n")}`}
id={[data.type, data.id, ...data.node.base_classes].join("|")}
type={data.node.base_classes.join("|")}
left={false}
/>
</>
</div>
</div>
<div className="h-full w-full py-5">
<div className="w-full px-5 pb-3 text-sm text-gray-500 dark:text-gray-300">
{data.node.description}
</div>
<>
{Object.keys(data.node.template)
.filter((t) => t.charAt(0) !== "_")
.map((t: string, idx) => (
<div key={idx}>
{/* {idx === 0 ? (
<div
className={classNames(
"px-5 py-2 mt-2 dark:text-white text-center",
Object.keys(data.node.template).filter(
(key) =>
!key.startsWith("_") &&
data.node.template[key].show &&
!data.node.template[key].advanced
).length === 0
? "hidden"
: ""
)}
>
Inputs
</div>
) : (
<></>
)} */}
{data.node.template[t].show &&
!data.node.template[t].advanced ? (
<ParameterComponent
data={data}
color={
nodeColors[types[data.node.template[t].type]] ??
nodeColors.unknown
}
title={
data.node.template[t].display_name
? data.node.template[t].display_name
: data.node.template[t].name
? toTitleCase(data.node.template[t].name)
: toTitleCase(t)
}
name={t}
tooltipTitle={
"Type: " +
data.node.template[t].type +
(data.node.template[t].list ? " list" : "")
}
required={data.node.template[t].required}
id={data.node.template[t].type + "|" + t + "|" + data.id}
left={true}
type={data.node.template[t].type}
/>
) : (
<></>
)}
</div>
))}
<div
className={classNames(
Object.keys(data.node.template).length < 1 ? "hidden" : "",
"flex w-full justify-center"
)}
>
{" "}
</div>
{/* <div className="px-5 py-2 mt-2 dark:text-white text-center">
Output
</div> */}
<ParameterComponent
data={data}
color={nodeColors[types[data.type]] ?? nodeColors.unknown}
title={data.type}
tooltipTitle={`Type: ${data.node.base_classes.join(" | ")}`}
id={[data.type, data.id, ...data.node.base_classes].join("|")}
type={data.node.base_classes.join("|")}
left={false}
/>
</>
</div>
</div>
</>
);
}

View file

@ -94,7 +94,7 @@ export default function SingleAlert({
{dropItem.link ? (
<Link
to={dropItem.link}
className="whitespace-nowrap font-medium text-blue-700 dark:text-blue-50 dark:hover:text-blue-100 hover:text-blue-600"
className="whitespace-nowrap font-medium text-blue-700 dark:text-blue-50 dark:hover:text-blue-100 hover:text-ring"
>
Details
</Link>

View file

@ -51,7 +51,7 @@ export default function NoticeAlert({
{link !== "" ? (
<Link
to={link}
className="whitespace-nowrap font-medium text-blue-700 dark:text-blue-50 hover:dark:text-blue-10 hover:text-blue-600"
className="whitespace-nowrap font-medium text-blue-700 dark:text-blue-50 hover:dark:text-blue-10 hover:text-ring"
>
Details
</Link>

View file

@ -0,0 +1,82 @@
import React, { useState, ChangeEvent } from "react";
import { Textarea } from "../../components/ui/textarea";
import { Label } from "../../components/ui/label";
import { Input } from "../../components/ui/input";
type InputProps = {
name: string | null;
description: string | null;
maxLength?: number;
flows: Array<{ id: string; name: string }>;
tabId: string;
setName: (name: string) => void;
setDescription: (description: string) => void;
updateFlow: (flow: { id: string; name: string }) => void;
};
export const EditFlowSettings: React.FC<InputProps> = ({
name,
description,
maxLength = 50,
flows,
tabId,
setName,
setDescription,
updateFlow,
}) => {
const [isMaxLength, setIsMaxLength] = useState(false);
const handleNameChange = (event: ChangeEvent<HTMLInputElement>) => {
const { value } = event.target;
if (value.length >= maxLength) {
setIsMaxLength(true);
} else {
setIsMaxLength(false);
}
setName(value);
};
const handleDescriptionChange = (event: ChangeEvent<HTMLTextAreaElement>) => {
setDescription(event.target.value);
};
return (
<>
<Label>
<div className="flex justify-between">
<span className="font-medium">Name</span>{" "}
{isMaxLength && (
<span className="text-red-500 animate-pulse ml-10">
Character limit reached
</span>
)}
</div>
<Input
className="mt-2 font-normal"
onChange={handleNameChange}
type="text"
name="name"
value={name ?? ""}
placeholder="File name"
id="name"
maxLength={maxLength}
/>
</Label>
<Label>
<span className="font-medium">Description (optional)</span>
<Textarea
name="description"
id="description"
onChange={handleDescriptionChange}
value={description ?? ""}
placeholder="Flow description"
className="max-h-[100px] mt-2 font-normal"
rows={3}
/>
</Label>
</>
);
};
export default EditFlowSettings;

View file

@ -1,6 +1,6 @@
import { Disclosure } from "@headlessui/react";
import { ChevronLeftIcon } from "@heroicons/react/24/outline";
import { useContext } from "react";
import { useContext, useState } from "react";
import { Link } from "react-router-dom";
import { classNames } from "../../utils";
import { locationContext } from "../../contexts/locationContext";
@ -13,6 +13,7 @@ export default function ExtraSidebar() {
extraNavigation,
extraComponent,
} = useContext(locationContext);
return (
<>
<aside
@ -20,12 +21,7 @@ export default function ExtraSidebar() {
isStackedOpen ? "w-52" : "w-0 "
} flex-shrink-0 flex overflow-hidden flex-col border-r dark:border-r-gray-700 transition-all duration-500`}
>
<div className="w-52 dark:bg-gray-800 border dark:border-gray-700 overflow-y-auto scrollbar-hide h-full flex flex-col items-start">
<div className="flex pt-1 px-4 justify-between align-middle w-full">
<span className="text-gray-900 dark:text-white py-[2px] font-medium ">
{extraNavigation.title}
</span>
</div>
<div className="w-52 dark:bg-gray-800 border dark:border-gray-700 overflow-y-auto scrollbar-hide h-full flex flex-col items-start bg-white">
<div className="flex flex-grow flex-col w-full">
{extraNavigation.options ? (
<div className="p-4">
@ -37,8 +33,8 @@ export default function ExtraSidebar() {
to={item.href}
className={classNames(
item.href.split("/")[2] === current[4]
? "bg-gray-100 text-gray-900"
: "bg-white text-gray-600 hover:bg-gray-50 hover:text-gray-900",
? "bg-muted text-gray-900"
: "bg-white text-gray-600 hover:bg-muted hover:text-gray-900",
"group w-full flex items-center pl-2 py-2 text-sm font-medium rounded-md"
)}
>
@ -64,9 +60,9 @@ export default function ExtraSidebar() {
<Disclosure.Button
className={classNames(
item.href.split("/")[2] === current[4]
? "bg-gray-100 text-gray-900"
: "bg-white text-gray-600 hover:bg-gray-50 hover:text-gray-900",
"group w-full flex items-center pl-2 pr-1 py-2 text-left text-sm font-medium rounded-md focus:outline-none focus:ring-2 focus:ring-indigo-500"
? "bg-muted text-gray-900"
: "bg-white text-gray-600 hover:bg-muted hover:text-gray-900",
"group w-full flex items-center pl-2 pr-1 py-2 text-left text-sm font-medium rounded-md focus:outline-none focus:ring-1 focus:ring-indigo-500"
)}
>
<item.icon
@ -97,8 +93,8 @@ export default function ExtraSidebar() {
to={subItem.href}
className={classNames(
subItem.href.split("/")[3] === current[5]
? "bg-gray-100 text-gray-900"
: "bg-white text-gray-600 hover:bg-gray-50 hover:text-gray-900",
? "bg-muted text-gray-900"
: "bg-white text-gray-600 hover:bg-muted hover:text-gray-900",
"group flex w-full items-center rounded-md py-2 pl-11 pr-2 text-sm font-medium"
)}
>

View file

@ -37,13 +37,15 @@ const TooltipReact: FC<TooltipProps> = ({
id={selector}
content={content}
className={classNames(
"!bg-white !text-xs !font-normal !text-gray-700 !shadow-md !opacity-100 z-20",
"!bg-white !text-xs !font-normal !text-gray-700 !shadow-md !opacity-100 z-[9999]",
className
)}
place={position}
clickable={clickable}
isOpen={disabled ? false : undefined}
delayShow={delayShow}
positionStrategy="absolute"
float={true}
>
{htmlContent && htmlContent}
</ReactTooltip>

View file

@ -0,0 +1,26 @@
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "../ui/tooltip";
const ShadTooltip = (props) => {
return (
<TooltipProvider>
<Tooltip delayDuration={props.delayDuration}>
<TooltipTrigger asChild>{props.children}</TooltipTrigger>
<TooltipContent
side={props.side}
avoidCollisions={false}
sticky="always"
>
{props.content}
</TooltipContent>
</Tooltip>
</TooltipProvider>
);
};
export default ShadTooltip;

View file

@ -0,0 +1,71 @@
import { Trash2, ExternalLink } from "lucide-react";
import { useContext } from "react";
import { Link } from "react-router-dom";
import { TabsContext } from "../../contexts/tabsContext";
import { FlowType } from "../../types/flow";
import { gradients } from "../../utils";
import {
CardTitle,
CardDescription,
CardFooter,
Card,
CardHeader,
} from "../ui/card";
export const CardComponent = ({
flow,
id,
onDelete,
button,
}: {
flow: FlowType;
id: string;
onDelete?: () => void;
button?: JSX.Element;
}) => {
const { removeFlow } = useContext(TabsContext);
return (
<Card className="group">
<CardHeader>
<CardTitle className="flex w-full items-center gap-4">
<span
className={
"rounded-full w-7 h-7 flex items-center justify-center text-2xl " +
gradients[parseInt(flow.id.slice(0, 12), 16) % gradients.length]
}
></span>
<span className="flex-1 w-full inline-block truncate-doubleline break-words">
{flow.name}
</span>
{onDelete && (
<button className="flex self-start" onClick={onDelete}>
<Trash2 className="w-4 h-4 text-primary opacity-0 group-hover:opacity-100 transition-all" />
</button>
)}
</CardTitle>
<CardDescription className="pt-2 pb-2">
<div className="truncate-doubleline">
{flow.description}
{/* {flow.description} */}
</div>
</CardDescription>
</CardHeader>
<CardFooter>
<div className="flex gap-2 w-full justify-between items-end">
<div className="flex flex-wrap gap-2">
{/* <Badge variant="secondary">Agent</Badge>
<Badge variant="secondary">
<div className="w-3">
<OpenAiIcon />
</div>
<span className="text-base">&nbsp;</span>OpenAI+
</Badge> */}
</div>
{button && button}
</div>
</CardFooter>
</Card>
);
};

View file

@ -0,0 +1,159 @@
import { useState, useContext } from "react";
import { Transition } from "@headlessui/react";
import { Zap } from "lucide-react";
import { validateNodes } from "../../../utils";
import { FlowType } from "../../../types/flow";
import Loading from "../../../components/ui/loading";
import { useSSE } from "../../../contexts/SSEContext";
import { typesContext } from "../../../contexts/typesContext";
import { alertContext } from "../../../contexts/alertContext";
import { postBuildInit } from "../../../controllers/API";
export default function BuildTrigger({
open,
flow,
setIsBuilt,
isBuilt,
}: {
open: boolean;
flow: FlowType;
setIsBuilt: any;
isBuilt: boolean;
}) {
const { updateSSEData, isBuilding, setIsBuilding } = useSSE();
const { reactFlowInstance } = useContext(typesContext);
const { setErrorData } = useContext(alertContext);
async function handleBuild(flow: FlowType) {
try {
if (isBuilding) {
return;
}
const errors = validateNodes(reactFlowInstance);
if (errors.length > 0) {
setErrorData({
title: "Oops! Looks like you missed something",
list: errors,
});
return;
}
const minimumLoadingTime = 200; // in milliseconds
const startTime = Date.now();
setIsBuilding(true);
const allNodesValid = await streamNodeData(flow);
await enforceMinimumLoadingTime(startTime, minimumLoadingTime);
setIsBuilt(allNodesValid);
if (!allNodesValid) {
setErrorData({
title: "Oops! Looks like you missed something",
list: [
"Check components and retry. Hover over component status icon 🔴 to inspect.",
],
});
}
} catch (error) {
console.error("Error:", error);
} finally {
setIsBuilding(false);
}
}
async function streamNodeData(flow: FlowType) {
// Step 1: Make a POST request to send the flow data and receive a unique session ID
const response = await postBuildInit(flow);
const { flowId } = response.data;
// Step 2: Use the flow id to establish an SSE connection via EventSource
let validationResults = [];
let finished = false;
const apiUrl = `/api/v1/build/stream/${flowId}`;
const eventSource = new EventSource(apiUrl);
eventSource.onmessage = (event) => {
// If the event carries no data, ignore it
if (!event.data) {
return;
}
const parsedData = JSON.parse(event.data);
// if the event is the end of the stream, close the connection
if (parsedData.end_of_stream) {
eventSource.close();
return;
}
// Otherwise, process the data
const isValid = processStreamResult(parsedData);
validationResults.push(isValid);
};
eventSource.onerror = (error) => {
console.error("EventSource failed:", error);
eventSource.close();
};
// Step 3: Wait for the stream to finish
while (!finished) {
await new Promise((resolve) => setTimeout(resolve, 100));
finished = validationResults.length === flow.data.nodes.length;
}
// Step 4: Return true if all nodes are valid, false otherwise
return validationResults.every((result) => result);
}
function processStreamResult(parsedData) {
// Store each already-parsed chunk in the SSE context and report whether its node is valid
try {
updateSSEData({ [parsedData.id]: parsedData });
} catch (err) {
console.log("Error parsing stream data: ", err);
}
return parsedData.valid;
}
async function enforceMinimumLoadingTime(
startTime: number,
minimumLoadingTime: number
) {
const elapsedTime = Date.now() - startTime;
const remainingTime = minimumLoadingTime - elapsedTime;
if (remainingTime > 0) {
return new Promise((resolve) => setTimeout(resolve, remainingTime));
}
}
return (
<Transition
show={!open}
appear={true}
enter="transition ease-out duration-300"
enterFrom="translate-y-96"
enterTo="translate-y-0"
leave="transition ease-in duration-300"
leaveFrom="translate-y-0"
leaveTo="translate-y-96"
>
<div className={`fixed right-4` + (isBuilt ? " bottom-20" : " bottom-4")}>
<div
className="flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full shadow-md shadow-[#0000002a] hover:shadow-[#00000032]
bg-[#E2E7EE] dark:border-gray-600 cursor-pointer"
onClick={() => {
handleBuild(flow);
}}
>
<button>
<div className="flex gap-3 items-center">
{isBuilding ? (
// Show the loading spinner while the build is in progress
<Loading strokeWidth={1.5} style={{ color: "white" }} />
) : (
<Zap className="sh-6 w-6 fill-orange-400 stroke-1 stroke-orange-400" />
)}
</div>
</button>
</div>
</div>
</Transition>
);
}
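streamNodeData above relies on two message shapes arriving over the SSE connection: a per-node validation result and a final end_of_stream marker. A minimal sketch of those shapes as inferred from the onmessage handler; only id, valid, and end_of_stream are actually read by the frontend, any other field is an assumption rather than something taken from the backend code:

// Shapes inferred from the onmessage handler above.
interface NodeBuildResult {
  id: string;       // id of the node this result belongs to
  valid: boolean;   // whether the node built successfully
  params?: unknown; // extra payload stored via updateSSEData (assumed, not read here)
}

interface EndOfStream {
  end_of_stream: true; // final event; the client closes the EventSource on it
}

type BuildStreamMessage = NodeBuildResult | EndOfStream;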

View file

@ -1,15 +1,23 @@
import { Transition } from "@headlessui/react";
import {
Bars3CenterLeftIcon,
ChatBubbleBottomCenterTextIcon,
} from "@heroicons/react/24/outline";
import { nodeColors } from "../../../utils";
import { PopUpContext } from "../../../contexts/popUpContext";
import { useContext } from "react";
import ChatModal from "../../../modals/chatModal";
import { MessagesSquare } from "lucide-react";
import { alertContext } from "../../../contexts/alertContext";
import { useContext } from "react";
export default function ChatTrigger({ open, setOpen, isBuilt }) {
const { setErrorData } = useContext(alertContext);
function handleClick() {
if (isBuilt) {
setOpen(true);
} else {
setErrorData({
title: "Flow not built",
list: ["Please build the flow before chatting"],
});
}
}
export default function ChatTrigger({ open, setOpen }) {
const { openPopUp } = useContext(PopUpContext);
return (
<Transition
show={!open}
@ -23,16 +31,16 @@ export default function ChatTrigger({ open, setOpen }) {
>
<div className="absolute bottom-4 right-3">
<div
className="border flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full bg-gradient-to-r from-blue-500 via-blue-600 to-blue-700 dark:border-gray-600 cursor-pointer"
onClick={() => {
setOpen(true);
}}
className="flex justify-center align-center py-1 px-3 w-12 h-12 rounded-full shadow-md shadow-[#0000002a] hover:shadow-[#00000032]
bg-[#E2E7EE] dark:border-gray-600 cursor-pointer"
onClick={handleClick}
>
<button>
<div className="flex gap-3 items-center">
<ChatBubbleBottomCenterTextIcon
className="h-6 w-6 mt-1"
<div className="flex gap-3">
<MessagesSquare
className="pth-6 w-6 fill-[#5c8be1] stroke-1 stroke-[#5c8be1]"
style={{ color: "white" }}
strokeWidth={1.5}
/>
</div>
</button>

View file

@ -1,18 +1,23 @@
import { useEffect, useRef, useState } from "react";
import { ChatMessageType, ChatType } from "../../types/chat";
import { useNodes } from "reactflow";
import { ChatType } from "../../types/chat";
import ChatTrigger from "./chatTrigger";
import BuildTrigger from "./buildTrigger";
import ChatModal from "../../modals/chatModal";
import _ from "lodash";
import { getBuildStatus } from "../../controllers/API";
import { NodeType } from "../../types/flow";
export default function Chat({ flow }: ChatType) {
const [open, setOpen] = useState(false);
const [isBuilt, setIsBuilt] = useState(false);
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
if (
(event.key === "K" || event.key === "k") &&
(event.metaKey || event.ctrlKey)
(event.metaKey || event.ctrlKey) &&
isBuilt
) {
event.preventDefault();
setOpen((oldState) => !oldState);
@ -22,11 +27,58 @@ export default function Chat({ flow }: ChatType) {
return () => {
document.removeEventListener("keydown", handleKeyDown);
};
}, []);
}, [isBuilt]);
useEffect(() => {
// Define an async function within the useEffect hook
const fetchBuildStatus = async () => {
const response = await getBuildStatus(flow.id);
setIsBuilt(response.built);
};
// Call the async function
fetchBuildStatus();
}, [flow]);
const prevNodesRef = useRef<any[] | undefined>();
const nodes = useNodes();
useEffect(() => {
const prevNodes = prevNodesRef.current;
const currentNodes = nodes.map(
(node: NodeType) => node.data.node.template.value
);
if (
prevNodes &&
JSON.stringify(prevNodes) !== JSON.stringify(currentNodes)
) {
setIsBuilt(false);
}
prevNodesRef.current = currentNodes;
}, [nodes]);
return (
<>
<ChatModal key={flow.id} flow={flow} open={open} setOpen={setOpen} />
<ChatTrigger open={open} setOpen={setOpen} />
{isBuilt ? (
<div>
<BuildTrigger
open={open}
flow={flow}
setIsBuilt={setIsBuilt}
isBuilt={isBuilt}
/>
<ChatModal key={flow.id} flow={flow} open={open} setOpen={setOpen} />
<ChatTrigger open={open} setOpen={setOpen} isBuilt={isBuilt} />
</div>
) : (
<BuildTrigger
open={open}
flow={flow}
setIsBuilt={setIsBuilt}
isBuilt={isBuilt}
/>
)}
</>
);
}
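For context, the two controllers this component and BuildTrigger import are used as follows. The sketch shows plausible signatures inferred purely from the call sites; the endpoint paths and the axios usage are placeholders, not the actual routes in controllers/API:

// Hypothetical signatures matching how getBuildStatus and postBuildInit are called.
import axios, { AxiosResponse } from "axios";
import { FlowType } from "../../types/flow";

export async function getBuildStatus(
  flowId: string
): Promise<{ built: boolean }> {
  // Chat reads `response.built`, so this returns the payload directly.
  const response = await axios.get(`/api/v1/build/${flowId}/status`); // placeholder path
  return response.data;
}

export async function postBuildInit(
  flow: FlowType
): Promise<AxiosResponse<{ flowId: string }>> {
  // BuildTrigger destructures `response.data.flowId`, so this returns the full AxiosResponse.
  return axios.post("/api/v1/build/init", flow); // placeholder path
}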

Some files were not shown because too many files have changed in this diff.