Merge remote-tracking branch 'origin/feature/store' into bug/undo-copy

This commit is contained in:
Lucas Oliveira 2023-11-28 10:59:04 -03:00
commit 7ee1ad03f4
101 changed files with 1925 additions and 919 deletions

568
poetry.lock generated
View file

@ -13,87 +13,87 @@ files = [
[[package]]
name = "aiohttp"
version = "3.9.0"
version = "3.9.1"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
files = [
{file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6896b8416be9ada4d22cd359d7cb98955576ce863eadad5596b7cdfbf3e17c6c"},
{file = "aiohttp-3.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1736d87dad8ef46a8ec9cddd349fa9f7bd3a064c47dd6469c0d6763d3d49a4fc"},
{file = "aiohttp-3.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c9e5f4d7208cda1a2bb600e29069eecf857e6980d0ccc922ccf9d1372c16f4b"},
{file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8488519aa05e636c5997719fe543c8daf19f538f4fa044f3ce94bee608817cff"},
{file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ab16c254e2312efeb799bc3c06897f65a133b38b69682bf75d1f1ee1a9c43a9"},
{file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7a94bde005a8f926d0fa38b88092a03dea4b4875a61fbcd9ac6f4351df1b57cd"},
{file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b777c9286b6c6a94f50ddb3a6e730deec327e9e2256cb08b5530db0f7d40fd8"},
{file = "aiohttp-3.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:571760ad7736b34d05597a1fd38cbc7d47f7b65deb722cb8e86fd827404d1f6b"},
{file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:deac0a32aec29608eb25d730f4bc5a261a65b6c48ded1ed861d2a1852577c932"},
{file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4ee1b4152bc3190cc40ddd6a14715e3004944263ea208229ab4c297712aa3075"},
{file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3607375053df58ed6f23903aa10cf3112b1240e8c799d243bbad0f7be0666986"},
{file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:65b0a70a25456d329a5e1426702dde67be0fb7a4ead718005ba2ca582d023a94"},
{file = "aiohttp-3.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a2eb5311a37fe105aa35f62f75a078537e1a9e4e1d78c86ec9893a3c97d7a30"},
{file = "aiohttp-3.9.0-cp310-cp310-win32.whl", hash = "sha256:2cbc14a13fb6b42d344e4f27746a4b03a2cb0c1c3c5b932b0d6ad8881aa390e3"},
{file = "aiohttp-3.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ac9669990e2016d644ba8ae4758688534aabde8dbbc81f9af129c3f5f01ca9cd"},
{file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f8e05f5163528962ce1d1806fce763ab893b1c5b7ace0a3538cd81a90622f844"},
{file = "aiohttp-3.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4afa8f71dba3a5a2e1e1282a51cba7341ae76585345c43d8f0e624882b622218"},
{file = "aiohttp-3.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f929f4c9b9a00f3e6cc0587abb95ab9c05681f8b14e0fe1daecfa83ea90f8318"},
{file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28185e36a78d247c55e9fbea2332d16aefa14c5276a582ce7a896231c6b1c208"},
{file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a486ddf57ab98b6d19ad36458b9f09e6022de0381674fe00228ca7b741aacb2f"},
{file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70e851f596c00f40a2f00a46126c95c2e04e146015af05a9da3e4867cfc55911"},
{file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b7bf8fe4d39886adc34311a233a2e01bc10eb4e842220235ed1de57541a896"},
{file = "aiohttp-3.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c67a51ea415192c2e53e4e048c78bab82d21955b4281d297f517707dc836bf3d"},
{file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:694df243f394629bcae2d8ed94c589a181e8ba8604159e6e45e7b22e58291113"},
{file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3dd8119752dd30dd7bca7d4bc2a92a59be6a003e4e5c2cf7e248b89751b8f4b7"},
{file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:eb6dfd52063186ac97b4caa25764cdbcdb4b10d97f5c5f66b0fa95052e744eb7"},
{file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d97c3e286d0ac9af6223bc132dc4bad6540b37c8d6c0a15fe1e70fb34f9ec411"},
{file = "aiohttp-3.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:816f4db40555026e4cdda604a1088577c1fb957d02f3f1292e0221353403f192"},
{file = "aiohttp-3.9.0-cp311-cp311-win32.whl", hash = "sha256:3abf0551874fecf95f93b58f25ef4fc9a250669a2257753f38f8f592db85ddea"},
{file = "aiohttp-3.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:e18d92c3e9e22553a73e33784fcb0ed484c9874e9a3e96c16a8d6a1e74a0217b"},
{file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:99ae01fb13a618b9942376df77a1f50c20a281390dad3c56a6ec2942e266220d"},
{file = "aiohttp-3.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:05857848da443c8c12110d99285d499b4e84d59918a21132e45c3f0804876994"},
{file = "aiohttp-3.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:317719d7f824eba55857fe0729363af58e27c066c731bc62cd97bc9c3d9c7ea4"},
{file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1e3b3c107ccb0e537f309f719994a55621acd2c8fdf6d5ce5152aed788fb940"},
{file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45820ddbb276113ead8d4907a7802adb77548087ff5465d5c554f9aa3928ae7d"},
{file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a183f1978802588711aed0dea31e697d760ce9055292db9dc1604daa9a8ded"},
{file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a4cd44788ea0b5e6bb8fa704597af3a30be75503a7ed1098bc5b8ffdf6c982"},
{file = "aiohttp-3.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673343fbc0c1ac44d0d2640addc56e97a052504beacd7ade0dc5e76d3a4c16e8"},
{file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e8a3b79b6d186a9c99761fd4a5e8dd575a48d96021f220ac5b5fa856e5dd029"},
{file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6777a390e41e78e7c45dab43a4a0196c55c3b8c30eebe017b152939372a83253"},
{file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7ae5f99a32c53731c93ac3075abd3e1e5cfbe72fc3eaac4c27c9dd64ba3b19fe"},
{file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f1e4f254e9c35d8965d377e065c4a8a55d396fe87c8e7e8429bcfdeeb229bfb3"},
{file = "aiohttp-3.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11ca808f9a6b63485059f5f6e164ef7ec826483c1212a44f268b3653c91237d8"},
{file = "aiohttp-3.9.0-cp312-cp312-win32.whl", hash = "sha256:de3cc86f4ea8b4c34a6e43a7306c40c1275e52bfa9748d869c6b7d54aa6dad80"},
{file = "aiohttp-3.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca4fddf84ac7d8a7d0866664936f93318ff01ee33e32381a115b19fb5a4d1202"},
{file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f09960b5bb1017d16c0f9e9f7fc42160a5a49fa1e87a175fd4a2b1a1833ea0af"},
{file = "aiohttp-3.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8303531e2c17b1a494ffaeba48f2da655fe932c4e9a2626c8718403c83e5dd2b"},
{file = "aiohttp-3.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4790e44f46a4aa07b64504089def5744d3b6780468c4ec3a1a36eb7f2cae9814"},
{file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1d7edf74a36de0e5ca50787e83a77cf352f5504eb0ffa3f07000a911ba353fb"},
{file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94697c7293199c2a2551e3e3e18438b4cba293e79c6bc2319f5fd652fccb7456"},
{file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1b66dbb8a7d5f50e9e2ea3804b01e766308331d0cac76eb30c563ac89c95985"},
{file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9623cfd9e85b76b83ef88519d98326d4731f8d71869867e47a0b979ffec61c73"},
{file = "aiohttp-3.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f32c86dc967ab8c719fd229ce71917caad13cc1e8356ee997bf02c5b368799bf"},
{file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f50b4663c3e0262c3a361faf440761fbef60ccdde5fe8545689a4b3a3c149fb4"},
{file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dcf71c55ec853826cd70eadb2b6ac62ec577416442ca1e0a97ad875a1b3a0305"},
{file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:42fe4fd9f0dfcc7be4248c162d8056f1d51a04c60e53366b0098d1267c4c9da8"},
{file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76a86a9989ebf82ee61e06e2bab408aec4ea367dc6da35145c3352b60a112d11"},
{file = "aiohttp-3.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f9e09a1c83521d770d170b3801eea19b89f41ccaa61d53026ed111cb6f088887"},
{file = "aiohttp-3.9.0-cp38-cp38-win32.whl", hash = "sha256:a00ce44c21612d185c5275c5cba4bab8d7c1590f248638b667ed8a782fa8cd6f"},
{file = "aiohttp-3.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:d5b9345ab92ebe6003ae11d8092ce822a0242146e6fa270889b9ba965457ca40"},
{file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98d21092bf2637c5fa724a428a69e8f5955f2182bff61f8036827cf6ce1157bf"},
{file = "aiohttp-3.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35a68cd63ca6aaef5707888f17a70c36efe62b099a4e853d33dc2e9872125be8"},
{file = "aiohttp-3.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3d7f6235c7475658acfc1769d968e07ab585c79f6ca438ddfecaa9a08006aee2"},
{file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db04d1de548f7a62d1dd7e7cdf7c22893ee168e22701895067a28a8ed51b3735"},
{file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:536b01513d67d10baf6f71c72decdf492fb7433c5f2f133e9a9087379d4b6f31"},
{file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c8b0a6487e8109427ccf638580865b54e2e3db4a6e0e11c02639231b41fc0f"},
{file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7276fe0017664414fdc3618fca411630405f1aaf0cc3be69def650eb50441787"},
{file = "aiohttp-3.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23170247ef89ffa842a02bbfdc425028574d9e010611659abeb24d890bc53bb8"},
{file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b1a2ea8252cacc7fd51df5a56d7a2bb1986ed39be9397b51a08015727dfb69bd"},
{file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d71abc15ff7047412ef26bf812dfc8d0d1020d664617f4913df2df469f26b76"},
{file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2d820162c8c2bdbe97d328cd4f417c955ca370027dce593345e437b2e9ffdc4d"},
{file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:2779f5e7c70f7b421915fd47db332c81de365678180a9f3ab404088f87ba5ff9"},
{file = "aiohttp-3.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:366bc870d7ac61726f32a489fbe3d1d8876e87506870be66b01aeb84389e967e"},
{file = "aiohttp-3.9.0-cp39-cp39-win32.whl", hash = "sha256:1df43596b826022b14998f0460926ce261544fedefe0d2f653e1b20f49e96454"},
{file = "aiohttp-3.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:9c196b30f1b1aa3363a69dd69079ae9bec96c2965c4707eaa6914ba099fb7d4f"},
{file = "aiohttp-3.9.0.tar.gz", hash = "sha256:09f23292d29135025e19e8ff4f0a68df078fe4ee013bca0105b2e803989de92d"},
{file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"},
{file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"},
{file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"},
{file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"},
{file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"},
{file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"},
{file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"},
{file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"},
{file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"},
{file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"},
{file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"},
{file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"},
{file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"},
{file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"},
{file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"},
{file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"},
{file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"},
{file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"},
{file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"},
{file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"},
{file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"},
{file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"},
{file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"},
{file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"},
{file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"},
{file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"},
{file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"},
{file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"},
{file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"},
{file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"},
{file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"},
{file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"},
{file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"},
{file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"},
{file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"},
{file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"},
{file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"},
{file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"},
{file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"},
{file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"},
{file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"},
{file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"},
{file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"},
{file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"},
{file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"},
{file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"},
{file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"},
{file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"},
{file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"},
{file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"},
{file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"},
{file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"},
{file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"},
{file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"},
{file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"},
{file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"},
{file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"},
{file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"},
{file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"},
{file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"},
{file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"},
{file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"},
{file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"},
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"},
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"},
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"},
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"},
{file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"},
{file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"},
{file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"},
{file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"},
{file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"},
{file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"},
{file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"},
{file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"},
{file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"},
]
[package.dependencies]
@ -411,32 +411,32 @@ files = [
[[package]]
name = "boto3"
version = "1.29.6"
version = "1.29.7"
description = "The AWS SDK for Python"
optional = false
python-versions = ">= 3.7"
files = [
{file = "boto3-1.29.6-py3-none-any.whl", hash = "sha256:f4d19e01d176c3a5a05e4af733185ff1891b08a3c38d4a439800fa132aa6e9be"},
{file = "boto3-1.29.6.tar.gz", hash = "sha256:d1d0d979a70bf9b0b13ae3b017f8523708ad953f62d16f39a602d67ee9b25554"},
{file = "boto3-1.29.7-py3-none-any.whl", hash = "sha256:96e9890ebe7cd823b5f4976dd676e112c000c6528c28e20a2f274590589dd18b"},
{file = "boto3-1.29.7.tar.gz", hash = "sha256:1eb4c548118b5fc5e018dee956fd33e6fb249cd1f2def85f1bba816aef4d9f3e"},
]
[package.dependencies]
botocore = ">=1.32.6,<1.33.0"
botocore = ">=1.32.7,<1.33.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.7.0,<0.8.0"
s3transfer = ">=0.8.0,<0.9.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.32.6"
version = "1.32.7"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">= 3.7"
files = [
{file = "botocore-1.32.6-py3-none-any.whl", hash = "sha256:4454f967a4d1a01e3e6205c070455bc4e8fd53b5b0753221581ae679c55a9dfd"},
{file = "botocore-1.32.6.tar.gz", hash = "sha256:ecec876103783b5efe6099762dda60c2af67e45f7c0ab4568e8265d11c6c449b"},
{file = "botocore-1.32.7-py3-none-any.whl", hash = "sha256:58b33d02cafa23461c8a9d211b30e8cded992380a84de409379fd02811fa3e11"},
{file = "botocore-1.32.7.tar.gz", hash = "sha256:c6795c731b04c8e3635588c44cfd1a4462fc5987859195522c96812cf3eceff9"},
]
[package.dependencies]
@ -448,7 +448,7 @@ urllib3 = [
]
[package.extras]
crt = ["awscrt (==0.19.12)"]
crt = ["awscrt (==0.19.17)"]
[[package]]
name = "brotli"
@ -1275,7 +1275,7 @@ graph = ["objgraph (>=1.7.2)"]
name = "diskcache"
version = "5.6.3"
description = "Disk Cache -- Disk and file backed persistent cache."
optional = true
optional = false
python-versions = ">=3"
files = [
{file = "diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19"},
@ -1565,13 +1565,13 @@ files = [
[[package]]
name = "fake-useragent"
version = "1.3.0"
version = "1.4.0"
description = "Up-to-date simple useragent faker with real world database"
optional = false
python-versions = "*"
files = [
{file = "fake-useragent-1.3.0.tar.gz", hash = "sha256:0b3a223b4c03e3df46b0e9ff53ad26cf4690f68871396b9c59a7fa6ee830c395"},
{file = "fake_useragent-1.3.0-py3-none-any.whl", hash = "sha256:73cee1d10bcd1deb25a15e916f6674c537d2d9088ecb4d7af98c2619f83827d1"},
{file = "fake-useragent-1.4.0.tar.gz", hash = "sha256:5426e4015d8ccc5bb25f64d3dfcfd3915eba30ffebd31b86b60dc7a4c5d65528"},
{file = "fake_useragent-1.4.0-py3-none-any.whl", hash = "sha256:9acce439ee2c6cf9c3772fa6c200f62dc8d56605063327a4d8c5d0e47f414b85"},
]
[package.dependencies]
@ -1664,6 +1664,43 @@ files = [
{file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
]
[[package]]
name = "flaml"
version = "2.1.1"
description = "A fast library for automated machine learning and tuning"
optional = false
python-versions = ">=3.6"
files = [
{file = "FLAML-2.1.1-py3-none-any.whl", hash = "sha256:ba34f1a06f3cbc6bb23a2ea4830a264375f6bba497f402122a73e42647a15535"},
{file = "FLAML-2.1.1.tar.gz", hash = "sha256:53e94aacc996da80fe779bc6833d3b25c80c77fe11667d0912798e49293282eb"},
]
[package.dependencies]
NumPy = ">=1.17.0rc1"
[package.extras]
autogen = ["diskcache", "openai (==0.27.8)", "termcolor"]
automl = ["lightgbm (>=2.3.1)", "pandas (>=1.1.4)", "scikit-learn (>=0.24)", "scipy (>=1.4.1)", "xgboost (>=0.90)"]
autozero = ["packaging", "pandas", "scikit-learn"]
azureml = ["azureml-mlflow"]
benchmark = ["catboost (>=0.26)", "pandas (==1.1.4)", "psutil (==5.8.0)", "xgboost (==1.3.3)"]
blendsearch = ["optuna (==2.8.0)", "packaging"]
catboost = ["catboost (>=0.26)"]
forecast = ["hcrystalball (==0.1.10)", "holidays (<0.14)", "prophet (>=1.0.1)", "pytorch-forecasting (>=0.9.0)", "pytorch-lightning (==1.9.0)", "statsmodels (>=0.12.2)", "tensorboardX (==2.6)"]
hf = ["datasets", "nltk", "rouge-score", "seqeval", "transformers[torch] (==4.26)"]
mathchat = ["diskcache", "openai (==0.27.8)", "pydantic (==1.10.9)", "sympy", "termcolor", "wolframalpha"]
nlp = ["datasets", "nltk", "rouge-score", "seqeval", "transformers[torch] (==4.26)"]
nni = ["nni"]
notebook = ["jupyter"]
openai = ["diskcache", "openai (==0.27.8)"]
ray = ["ray[tune] (>=1.13,<2.0)"]
retrievechat = ["chromadb", "diskcache", "openai (==0.27.8)", "sentence-transformers", "termcolor", "tiktoken"]
spark = ["joblib (<1.3.0)", "joblibspark (>=0.5.0)", "pyspark (>=3.2.0)"]
synapse = ["joblib (<1.3.0)", "joblibspark (>=0.5.0)", "optuna (==2.8.0)", "pyspark (>=3.2.0)"]
test = ["catboost (>=0.26,<1.2)", "coverage (>=5.3)", "dataclasses", "datasets", "hcrystalball (==0.1.10)", "ipykernel", "joblib (<1.3.0)", "joblibspark (>=0.5.0)", "lightgbm (>=2.3.1)", "mlflow", "nbconvert", "nbformat", "nltk", "openml", "optuna (==2.8.0)", "packaging", "pandas (>=1.1.4)", "pre-commit", "psutil (==5.8.0)", "pydantic (==1.10.9)", "pyspark (>=3.2.0)", "pytest (>=6.1.1)", "pytorch-forecasting (>=0.9.0,<=0.10.1)", "pytorch-lightning (<1.9.1)", "requests (<2.29.0)", "rgf-python", "rouge-score", "scikit-learn (>=0.24)", "scipy (>=1.4.1)", "seqeval", "statsmodels (>=0.12.2)", "sympy", "tensorboardX (==2.6)", "thop", "torch", "torchvision", "transformers[torch] (==4.26)", "wolframalpha", "xgboost (>=0.90)"]
ts-forecast = ["hcrystalball (==0.1.10)", "holidays (<0.14)", "prophet (>=1.0.1)", "statsmodels (>=0.12.2)"]
vw = ["scikit-learn", "vowpalwabbit (>=8.10.0,<9.0.0)"]
[[package]]
name = "flask"
version = "3.0.0"
@ -2888,13 +2925,13 @@ files = [
[[package]]
name = "idna"
version = "3.4"
version = "3.6"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
[[package]]
@ -2965,13 +3002,13 @@ files = [
[[package]]
name = "ipykernel"
version = "6.27.0"
version = "6.26.0"
description = "IPython Kernel for Jupyter"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipykernel-6.27.0-py3-none-any.whl", hash = "sha256:4388caa3c2cba0a381e20d289545e88a8aef1fe57a884d4c018718ec8c23c121"},
{file = "ipykernel-6.27.0.tar.gz", hash = "sha256:7f4986f606581be73bfb32dc7a1ac9fa0e804c9be50ddf1c7a119413e982693f"},
{file = "ipykernel-6.26.0-py3-none-any.whl", hash = "sha256:3ba3dc97424b87b31bb46586b5167b3161b32d7820b9201a9e698c71e271602c"},
{file = "ipykernel-6.26.0.tar.gz", hash = "sha256:553856658eb8430bbe9653ea041a41bff63e9606fc4628873fc92a6cf3abd404"},
]
[package.dependencies]
@ -2998,24 +3035,23 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio"
[[package]]
name = "ipython"
version = "8.17.2"
version = "8.18.1"
description = "IPython: Productive Interactive Computing"
optional = false
python-versions = ">=3.9"
files = [
{file = "ipython-8.17.2-py3-none-any.whl", hash = "sha256:1e4d1d666a023e3c93585ba0d8e962867f7a111af322efff6b9c58062b3e5444"},
{file = "ipython-8.17.2.tar.gz", hash = "sha256:126bb57e1895594bb0d91ea3090bbd39384f6fe87c3d57fd558d0670f50339bb"},
{file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"},
{file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"},
]
[package.dependencies]
appnope = {version = "*", markers = "sys_platform == \"darwin\""}
colorama = {version = "*", markers = "sys_platform == \"win32\""}
decorator = "*"
exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
jedi = ">=0.16"
matplotlib-inline = "*"
pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""}
prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0"
prompt-toolkit = ">=3.0.41,<3.1.0"
pygments = ">=2.4.0"
stack-data = "*"
traitlets = ">=5"
@ -3559,13 +3595,13 @@ six = "*"
[[package]]
name = "langfuse"
version = "1.7.5"
version = "1.9.2"
description = "A client library for accessing langfuse"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langfuse-1.7.5-py3-none-any.whl", hash = "sha256:ebbcc52f454a9c7cfc9f382e66fddafddb0219f9233598317bbcb66c215b39b6"},
{file = "langfuse-1.7.5.tar.gz", hash = "sha256:99fc5a30b157a16cc3dcb82e84af13fabc2fd0d192be32ef2ad6d9a7fe27d130"},
{file = "langfuse-1.9.2-py3-none-any.whl", hash = "sha256:0a171830ec15b26e6b512db4612422a48402d422c1269091390d0b81fbdfe46b"},
{file = "langfuse-1.9.2.tar.gz", hash = "sha256:6542d38013e9ca21ab3d31840bdd517a79cb6db72ddb30fec8ad0d999d0e2bfd"},
]
[package.dependencies]
@ -4169,38 +4205,38 @@ dill = ">=0.3.7"
[[package]]
name = "mypy"
version = "1.7.0"
version = "1.7.1"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"},
{file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"},
{file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"},
{file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"},
{file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"},
{file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"},
{file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"},
{file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"},
{file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"},
{file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"},
{file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"},
{file = "mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"},
{file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"},
{file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"},
{file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"},
{file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"},
{file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"},
{file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"},
{file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"},
{file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"},
{file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"},
{file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"},
{file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"},
{file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"},
{file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"},
{file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"},
{file = "mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"},
{file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"},
{file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"},
{file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"},
{file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"},
{file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"},
{file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"},
{file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"},
{file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"},
{file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"},
{file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"},
{file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"},
{file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"},
{file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"},
{file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"},
{file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"},
{file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"},
{file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"},
{file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"},
{file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"},
{file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"},
{file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"},
{file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"},
{file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"},
{file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"},
{file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"},
{file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"},
{file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"},
]
[package.dependencies]
@ -5092,13 +5128,13 @@ win-unicode-console = {version = "*", markers = "platform_system == \"Windows\"
[[package]]
name = "pexpect"
version = "4.8.0"
version = "4.9.0"
description = "Pexpect allows easy control of interactive console applications."
optional = false
python-versions = "*"
files = [
{file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
{file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
{file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
{file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
]
[package.dependencies]
@ -5106,12 +5142,12 @@ ptyprocess = ">=0.5"
[[package]]
name = "pgvector"
version = "0.2.3"
version = "0.2.4"
description = "pgvector support for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pgvector-0.2.3-py2.py3-none-any.whl", hash = "sha256:9d53dc01138ecc7c9aca64e4680cfa9edf4c38f9cb8ed7098317871fdd211824"},
{file = "pgvector-0.2.4-py2.py3-none-any.whl", hash = "sha256:548e1f88d3c7433020c1c177feddad2f36915c262852d621f9018fcafff6870b"},
]
[package.dependencies]
@ -5733,6 +5769,34 @@ files = [
[package.dependencies]
pyasn1 = ">=0.4.6,<0.6.0"
[[package]]
name = "pyautogen"
version = "0.2.0"
description = "Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework"
optional = false
python-versions = ">=3.8, <3.12"
files = [
{file = "pyautogen-0.2.0-py3-none-any.whl", hash = "sha256:d7bf4d239f85152e191026d8173f649e256c431cf31b93ca3629cd2f0c525a46"},
{file = "pyautogen-0.2.0.tar.gz", hash = "sha256:858f2d15eaa68f043f7b67b975a6d27f738c98ca4d7e0e96b400061c0ac3e692"},
]
[package.dependencies]
diskcache = "*"
flaml = "*"
openai = ">=1.2,<2.0"
python-dotenv = "*"
termcolor = "*"
tiktoken = "*"
[package.extras]
blendsearch = ["flaml[blendsearch]"]
graphs = ["matplotlib (>=3.8.1,<3.9.0)", "networkx (>=3.2.1,<3.3.0)"]
lmm = ["pillow", "replicate"]
mathchat = ["pydantic (==1.10.9)", "sympy", "wolframalpha"]
retrievechat = ["chromadb", "ipython", "pypdf", "sentence-transformers"]
teachable = ["chromadb"]
test = ["coverage (>=5.3)", "ipykernel", "nbconvert", "nbformat", "pre-commit", "pytest (>=6.1.1)", "pytest-asyncio"]
[[package]]
name = "pycparser"
version = "2.21"
@ -7025,126 +7089,126 @@ files = [
[[package]]
name = "s3transfer"
version = "0.7.0"
version = "0.8.0"
description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = ">= 3.7"
files = [
{file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"},
{file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"},
{file = "s3transfer-0.8.0-py3-none-any.whl", hash = "sha256:baa479dc2e63e5c2ed51611b4d46cdf0295e2070d8d0b86b22f335ee5b954986"},
{file = "s3transfer-0.8.0.tar.gz", hash = "sha256:e8d6bd52ffd99841e3a57b34370a54841f12d3aab072af862cdcc50955288002"},
]
[package.dependencies]
botocore = ">=1.12.36,<2.0a.0"
botocore = ">=1.32.7,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
crt = ["botocore[crt] (>=1.32.7,<2.0a.0)"]
[[package]]
name = "safetensors"
version = "0.4.0"
version = "0.4.1"
description = ""
optional = true
python-versions = ">=3.7"
files = [
{file = "safetensors-0.4.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:2289ae6dbe6d027ecee016b28ced13a2e21a0b3a3a757a23033a2d1c0b1bad55"},
{file = "safetensors-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bf6458959f310f551cbbeef2255527ade5f783f952738e73e4d0136198cc3bfe"},
{file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6b60a58a8f7cc7aed3b5b73dce1f5259a53c83d9ba43a76a874e6ad868c1b4d"},
{file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:491b3477e4d0d4599bb75d79da4b75af2e6ed9b1f6ec2b715991f0bc927bf09a"},
{file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d2e10b7e0cd18bb73ed7c17c624a5957b003b81345e18159591771c26ee428"},
{file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f667a4c12fb593f5f66ce966cb1b14a7148898b2b1a7f79e0761040ae1e3c51"},
{file = "safetensors-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9909512bcb6f712bdd04c296cdfb0d8ff73d258ffc5af884bb62ea02d221e0"},
{file = "safetensors-0.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33d29e846821f0e4f92614022949b09ccf063cb36fe2f9fe099cde1efbfbb87"},
{file = "safetensors-0.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4d512525a8e05a045ce6698066ba0c5378c174a83e0b3720a8c7799dc1bb06f3"},
{file = "safetensors-0.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0219cea445177f6ad1f9acd3a8d025440c8ff436d70a4a7c7ba9c36066aa9474"},
{file = "safetensors-0.4.0-cp310-none-win32.whl", hash = "sha256:67ab171eeaad6972d3971c53d29d53353c67f6743284c6d637b59fa3e54c8a94"},
{file = "safetensors-0.4.0-cp310-none-win_amd64.whl", hash = "sha256:7ffc736039f08a9ca1f09816a7481b8e4469c06e8f8a5ffa8cb67ddd79e6d77f"},
{file = "safetensors-0.4.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4fe9e3737b30de458225a23926219ca30b902ee779b6a3df96eaab2b6d625ec2"},
{file = "safetensors-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7916e814a90008de767b1c164a1d83803693c661ffe9af5a697b22e2752edb0"},
{file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbc4a4da01143472323c145f3c289e5f6fabde0ac0a3414dabf912a21692fff4"},
{file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a54c21654a47669b38e359e8f852af754b786c9da884bb61ad5e9af12bd71ccb"},
{file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:25cd407955bad5340ba17f9f8ac789a0d751601a311e2f7b2733f9384478c95e"},
{file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82e8fc4e3503cd738fd40718a430fe0e5ce6e7ff91a73d6ce628bbb89c41e8ce"},
{file = "safetensors-0.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b92059b1a4ad163024d4f526e0e73ebe2bb3ae70537e15e347820b4de5dc27"},
{file = "safetensors-0.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5daa05058f7dce85b5f9f60c4eab483ed7859d63978f08a76e52e78859ff20ca"},
{file = "safetensors-0.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a86565a5c112dd855909e20144947b4f53abb78c4de207f36ca71ee63ba5b90d"},
{file = "safetensors-0.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38032078ed9fea52d06584e441bccc73fb475c4581600c6d6166de2fe2deb3d1"},
{file = "safetensors-0.4.0-cp311-none-win32.whl", hash = "sha256:2f99d90c91b7c76b40a862acd9085bc77f7974a27dee7cfcebe46149af5a99a1"},
{file = "safetensors-0.4.0-cp311-none-win_amd64.whl", hash = "sha256:74e2a448ffe19be188b457b130168190ee73b5a75e45ba96796320c1f5ae35d2"},
{file = "safetensors-0.4.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:1e2f9c69b41d03b4826ffb96b29e07444bb6b34a78a7bafd0b88d59e8ec75b8a"},
{file = "safetensors-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3910fb5bf747413b59f1a34e6d2a993b589fa7d919709518823c70efaaa350bd"},
{file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8fdca709b2470a35a59b1e6dffea75cbe1214b22612b5dd4c93947697aea8b"},
{file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f27b8ef814c5fb43456caeb7f3cbb889b76115180aad1f42402839c14a47c5b"},
{file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b2d6101eccc43c7be0cb052f13ceda64288b3d8b344b988ed08d7133cbce2f3"},
{file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdc34027b545a69be3d4220c140b276129523e4e46db06ad1a0b60d6a4cf9214"},
{file = "safetensors-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db7bb48ca9e90bb9526c71b388d38d8de160c0354f4c5126df23e8701a870dcb"},
{file = "safetensors-0.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a78ffc0795d3595cd9e4d453502e35f764276c49e434b25556a15a337db4dafc"},
{file = "safetensors-0.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8e735b0f79090f6855b55e205e820b7b595502ffca0009a5c13eef3661ce465b"},
{file = "safetensors-0.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f8d2416734e850d5392afffbcb2b8985ea29fb171f1cb197e2ae51b8e35d6438"},
{file = "safetensors-0.4.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:e853e189ba7d47eaf561094586692ba2bbdd258c096f1755805cac098de0e6ab"},
{file = "safetensors-0.4.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:4b2aa57b5a4d576f3d1dd6e56980026340f156f8a13c13016bfac4e25295b53f"},
{file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b6c1316ffde6cb4bf22c7445bc9fd224b4d1b9dd7320695f5611c89e802e4b6"},
{file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:003077ec85261d00061058fa12e3c1d2055366b02ce8f2938929359ffbaff2b8"},
{file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd63d83a92f1437a8b0431779320376030ae43ace980bea5686d515de0784100"},
{file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2077801800b4b13301d8d6290c7fb5bd60737320001717153ebc4371776643b5"},
{file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7abe0e157a49a75aeeccfbc4f3dac38d8f98512d3cdb35c200f8e628dc5773cf"},
{file = "safetensors-0.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bfed574f6b1e7e7fe1f17213278875ef6c6e8b1582ab6eda93947db1178cae6"},
{file = "safetensors-0.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:964ef166a286ce3b023d0d0bd0e21d440a1c8028981c8abdb136bc7872ba9b3d"},
{file = "safetensors-0.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:44f84373e42183bd56a13a1f2d8acb1db7fedaeffbd83e79cec861477eee1af4"},
{file = "safetensors-0.4.0-cp37-none-win32.whl", hash = "sha256:c68132727dd86fb641102e494d445f705efe402f4d5e24b278183a15499ab400"},
{file = "safetensors-0.4.0-cp37-none-win_amd64.whl", hash = "sha256:1db87155454c168aef118d5657a403aee48a4cb08d8851a981157f07351ea317"},
{file = "safetensors-0.4.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:9e583fa68e5a07cc859c4e13c1ebff12029904aa2e27185cf04a1f57fe9a81c4"},
{file = "safetensors-0.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73e7696dcf3f72f99545eb1abe6106ad65ff1f62381d6ce4b34be3272552897a"},
{file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4936096a57c62e84e200f92620a536be067fc5effe46ecc7f230ebb496ecd579"},
{file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87b328ee1591adac332543e1f5fc2c2d7f149b745ebb0d58d7850818ff9cee27"},
{file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b69554c143336256260eceff1d3c0969172a641b54d4668489a711b05f92a2c0"},
{file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ebf6bcece5d5d1bd6416472f94604d2c834ca752ac60ed42dba7157e595a990"},
{file = "safetensors-0.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6686ce01b8602d55a7d9903c90d4a6e6f90aeb6ddced7cf4605892d0ba94bcb8"},
{file = "safetensors-0.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b8fd6cc2f3bda444a048b541c843c7b7fefc89c4120d7898ea7d5b026e93891"},
{file = "safetensors-0.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6abfe67692f81b8bdb99c837f28351c17e624ebf136970c850ee989c720446"},
{file = "safetensors-0.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:27a24ca8822c469ee452db4c13418ba983315a0d863c018a9af15f2305eac38c"},
{file = "safetensors-0.4.0-cp38-none-win32.whl", hash = "sha256:c4a0a47c8640167792d8261ee21b26430bbc39130a7edaad7f4c0bc05669d00e"},
{file = "safetensors-0.4.0-cp38-none-win_amd64.whl", hash = "sha256:a738970a367f39249e2abb900d9441a8a86d7ff50083e5eaa6e7760a9f216014"},
{file = "safetensors-0.4.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:806379f37e1abd5d302288c4b2f4186dd7ea7143d4c7811f90a8077f0ae8967b"},
{file = "safetensors-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b9b94133ed2ae9dda0e95dcace7b7556eba023ffa4c4ae6df8f99377f571d6a"},
{file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b563a14c43614815a6b524d2e4edeaace50b717f7e7487bb227dd5b68350f5a"},
{file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00a9b157be660fb7ba88fa2eedd05ec93793a5b61e43e783e10cb0b995372802"},
{file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8f194f45ab6aa767993c24f0aeb950af169dbc5d611b94c9021a1d13b8a1a34"},
{file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:469360b9451db10bfed3881378d5a71b347ecb1ab4f42367d77b8164a13af70b"},
{file = "safetensors-0.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5f75fa97ccf32a3c7af476c6a0e851023197d3c078f6de3612008fff94735f9"},
{file = "safetensors-0.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acf0180283c2efae72f1d8c0a4a7974662091df01be3aa43b5237b1e52ed0a01"},
{file = "safetensors-0.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cd02b495ba0814619f40bda46771bb06dbbf1d42524b66fa03b2a736c77e4515"},
{file = "safetensors-0.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c42bdea183dbaa99e2f0e6120dc524df79cf4289a6f90f30a534444ef20f49fa"},
{file = "safetensors-0.4.0-cp39-none-win32.whl", hash = "sha256:cef7bb5d9feae7146c3c3c7b3aef7d2c8b39ba7f5ff4252d368eb69462a47076"},
{file = "safetensors-0.4.0-cp39-none-win_amd64.whl", hash = "sha256:79dd46fb1f19282fd12f544471efb97823ede927cedbf9cf35550d92b349fdd2"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:002301c1afa32909f83745b0c124d002e7ae07e15671f3b43cbebd0ffc5e6037"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:67762d36ae088c73d4a3c96bfc4ea8d31233554f35b6cace3a18533238d462ea"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f45230f20a206e5e4c7f7bbf9342178410c6f8b0af889843aa99045a76f7691"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f2ca939bbd8fb2f4dfa28e39a146dad03bc9325e9fc831b68f7b98f69a5a2f1"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:61a00f281391fae5ce91df70918bb61c12d2d514a493fd8056e12114be729911"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:435fd136a42492b280cb55126f9ce9535b35dd49df2c5d572a5945455a439448"},
{file = "safetensors-0.4.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f0daa788273d683258fb1e4a5e16bef4486b2fca536451a2591bc0f4a6488895"},
{file = "safetensors-0.4.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:0620ab0d41e390ccb1c4ea8f63dc00cb5f0b96a5cdd3cd0d64c21765720c074a"},
{file = "safetensors-0.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1fa8d067733cb67f22926689ee808f08afacf7700d2ffb44efae90a0693eb1"},
{file = "safetensors-0.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaa40bc363edda145db75cd030f3b1822e5478d550c3500a42502ecef32c959"},
{file = "safetensors-0.4.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b561fbc044db7beff2ece0ec219a291809d45a38d30c6b38e7cc46482582f4ba"},
{file = "safetensors-0.4.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:79a983b09782dacf9a1adb19bb98f4a8f6c3144108939f572c047b5797e43cf5"},
{file = "safetensors-0.4.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:10b65cd3ad79f5d0daf281523b4146bc271a34bb7430d4e03212e0de8622dab8"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:114decacc475a6a9e2f9102a00c171d113ddb5d35cb0bda0db2c0c82b2eaa9ce"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:72ddb741dd5fe42521db76a70e012f76995516a12e7e0ef26be03ea9be77802a"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c5556c2ec75f5a6134866eddd7341cb36062e6edaea343478a279591b63ddba"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed50f239b0ce7ae85b078395593b4a351ede7e6f73af25f4873e3392336f64c9"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495dcaea8fbab70b927d2274e2547824462737acbf98ccd851a71124f779a5c6"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3f4d90c79a65ba2fe2ff0876f6140748f0a3ce6a21e27a35190f4f96321803f8"},
{file = "safetensors-0.4.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7a524382b5c55b5fbb168e0e9d3f502450c8cf3fb81b93e880018437c206a482"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:9849ea60c7e840bfdd6030ad454d4a6ba837b3398c902f15a30460dd6961c28c"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:6c42623ae7045615d9eaa6877b9df1db4e9cc71ecc14bcc721ea1e475dddd595"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80cb8342f00f3c41b3b93b1a599b84723280d3ac90829bc62262efc03ab28793"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c4f5ed4ede384dea8c99bae76b0718a828dbf7b2c8ced1f44e3b9b1a124475"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40d7cf03493bfe75ef62e2c716314474b28d9ba5bf4909763e4b8dd14330c01a"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:232029f0a9fa6fa1f737324eda98a700409811186888536a2333cbbf64e41741"},
{file = "safetensors-0.4.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9ed55f4a20c78ff3e8477efb63c8303c2152cdfb3bfea4d025a80f54d38fd628"},
{file = "safetensors-0.4.0.tar.gz", hash = "sha256:b985953c3cf11e942eac4317ef3db3da713e274109cf7cfb6076d877054f013e"},
{file = "safetensors-0.4.1-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:cba01c6b76e01ec453933b3b3c0157c59b52881c83eaa0f7666244e71aa75fd1"},
{file = "safetensors-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a8f6f679d97ea0135c7935c202feefbd042c149aa70ee759855e890c01c7814"},
{file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc2ce1f5ae5143a7fb72b71fa71db6a42b4f6cf912aa3acdc6b914084778e68"},
{file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d87d993eaefe6611a9c241a8bd364a5f1ffed5771c74840363a6c4ed8d868f6"},
{file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:097e9af2efa8778cd2f0cba451784253e62fa7cc9fc73c0744d27212f7294e25"},
{file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d10a9f7bae608ccfdc009351f01dc3d8535ff57f9488a58a4c38e45bf954fe93"},
{file = "safetensors-0.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270b99885ec14abfd56c1d7f28ada81740a9220b4bae960c3de1c6fe84af9e4d"},
{file = "safetensors-0.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:285b52a481e7ba93e29ad4ec5841ef2c4479ef0a6c633c4e2629e0508453577b"},
{file = "safetensors-0.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c3c9f0ca510e0de95abd6424789dcbc879942a3a4e29b0dfa99d9427bf1da75c"},
{file = "safetensors-0.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:88b4653059c903015284a9722f9a46838c654257173b279c8f6f46dbe80b612d"},
{file = "safetensors-0.4.1-cp310-none-win32.whl", hash = "sha256:2fe6926110e3d425c4b684a4379b7796fdc26ad7d16922ea1696c8e6ea7e920f"},
{file = "safetensors-0.4.1-cp310-none-win_amd64.whl", hash = "sha256:a79e16222106b2f5edbca1b8185661477d8971b659a3c814cc6f15181a9b34c8"},
{file = "safetensors-0.4.1-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:d93321eea0dd7e81b283e47a1d20dee6069165cc158286316d0d06d340de8fe8"},
{file = "safetensors-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ff8e41c8037db17de0ea2a23bc684f43eaf623be7d34906fe1ac10985b8365e"},
{file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39d36f1d88468a87c437a1bc27c502e71b6ca44c385a9117a9f9ba03a75cc9c6"},
{file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7ef010e9afcb4057fb6be3d0a0cfa07aac04fe97ef73fe4a23138d8522ba7c17"},
{file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b287304f2b2220d51ccb51fd857761e78bcffbeabe7b0238f8dc36f2edfd9542"},
{file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e09000b2599e1836314430f81a3884c66a5cbabdff5d9f175b5d560d4de38d78"},
{file = "safetensors-0.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9c80ce0001efa16066358d2dd77993adc25f5a6c61850e4ad096a2232930bce"},
{file = "safetensors-0.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:413e1f6ac248f7d1b755199a06635e70c3515493d3b41ba46063dec33aa2ebb7"},
{file = "safetensors-0.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3ac139377cfe71ba04573f1cda66e663b7c3e95be850e9e6c2dd4b5984bd513"},
{file = "safetensors-0.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:04157d008385bea66d12fe90844a80d4a76dc25ec5230b5bd9a630496d1b7c03"},
{file = "safetensors-0.4.1-cp311-none-win32.whl", hash = "sha256:5f25297148ec665f0deb8bd67e9564634d8d6841041ab5393ccfe203379ea88b"},
{file = "safetensors-0.4.1-cp311-none-win_amd64.whl", hash = "sha256:b2f8877990a72ff595507b80f4b69036a9a1986a641f8681adf3425d97d3d2a5"},
{file = "safetensors-0.4.1-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:eb2c1da1cc39509d1a55620a5f4d14f8911c47a89c926a96e6f4876e864375a3"},
{file = "safetensors-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:303d2c0415cf15a28f8d7f17379ea3c34c2b466119118a34edd9965983a1a8a6"},
{file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb4cb3e37a9b961ddd68e873b29fe9ab4a081e3703412e34aedd2b7a8e9cafd9"},
{file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae5497adc68669db2fed7cb2dad81e6a6106e79c9a132da3efdb6af1db1014fa"},
{file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b30abd0cddfe959d1daedf92edcd1b445521ebf7ddefc20860ed01486b33c90"},
{file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d784a98c492c751f228a4a894c3b8a092ff08b24e73b5568938c28b8c0e8f8df"},
{file = "safetensors-0.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57a5ab08b0ec7a7caf30d2ac79bb30c89168431aca4f8854464bb9461686925"},
{file = "safetensors-0.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:edcf3121890b5f0616aa5a54683b1a5d2332037b970e507d6bb7841a3a596556"},
{file = "safetensors-0.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fdb58dee173ef33634c3016c459d671ca12d11e6acf9db008261cbe58107e579"},
{file = "safetensors-0.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:780dc21eb3fd32ddd0e8c904bdb0290f2454f4ac21ae71e94f9ce72db1900a5a"},
{file = "safetensors-0.4.1-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:48901bd540f8a3c1791314bc5c8a170927bf7f6acddb75bf0a263d081a3637d4"},
{file = "safetensors-0.4.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:3b0b7b2d5976fbed8a05e2bbdce5816a59e6902e9e7c7e07dc723637ed539787"},
{file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f69903ff49cb30b9227fb5d029bea276ea20d04b06803877a420c5b1b74c689"},
{file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0ddd050e01f3e843aa8c1c27bf68675b8a08e385d0045487af4d70418c3cb356"},
{file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a82bc2bd7a9a0e08239bdd6d7774d64121f136add93dfa344a2f1a6d7ef35fa"},
{file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6ace9e66a40f98a216ad661245782483cf79cf56eb2b112650bb904b0baa9db5"},
{file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82cbb8f4d022f2e94498cbefca900698b8ded3d4f85212f47da614001ff06652"},
{file = "safetensors-0.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:791edc10a3c359a2f5f52d5cddab0df8a45107d91027d86c3d44e57162e5d934"},
{file = "safetensors-0.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:83c2cfbe8c6304f0891e7bb378d56f66d2148972eeb5f747cd8a2246886f0d8c"},
{file = "safetensors-0.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:04dd14f53f5500eb4c4149674216ba1000670efbcf4b1b5c2643eb244e7882ea"},
{file = "safetensors-0.4.1-cp37-none-win32.whl", hash = "sha256:d5b3defa74f3723a388bfde2f5d488742bc4879682bd93267c09a3bcdf8f869b"},
{file = "safetensors-0.4.1-cp37-none-win_amd64.whl", hash = "sha256:25a043cbb59d4f75e9dd87fdf5c009dd8830105a2c57ace49b72167dd9808111"},
{file = "safetensors-0.4.1-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:3f6a520af7f2717c5ecba112041f2c8af1ca6480b97bf957aba81ed9642e654c"},
{file = "safetensors-0.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3807ac3b16288dffebb3474b555b56fe466baa677dfc16290dcd02dca1ab228"},
{file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b58ba13a9e82b4bc3fc221914f6ef237fe6c2adb13cede3ace64d1aacf49610"},
{file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dac4bb42f8679aadc59bd91a4c5a1784a758ad49d0912995945cd674089f628e"},
{file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911b48dc09e321a194def3a7431662ff4f03646832f3a8915bbf0f449b8a5fcb"},
{file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82571d20288c975c1b30b08deb9b1c3550f36b31191e1e81fae87669a92217d0"},
{file = "safetensors-0.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da52ee0dc8ba03348ffceab767bd8230842fdf78f8a996e2a16445747143a778"},
{file = "safetensors-0.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2536b11ce665834201072e9397404170f93f3be10cca9995b909f023a04501ee"},
{file = "safetensors-0.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:998fbac99ca956c3a09fe07cc0b35fac26a521fa8865a690686d889f0ff4e4a6"},
{file = "safetensors-0.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:845be0aafabf2a60c2d482d4e93023fecffe5e5443d801d7a7741bae9de41233"},
{file = "safetensors-0.4.1-cp38-none-win32.whl", hash = "sha256:ce7a28bc8af685a69d7e869d09d3e180a275e3281e29cf5f1c7319e231932cc7"},
{file = "safetensors-0.4.1-cp38-none-win_amd64.whl", hash = "sha256:e056fb9e22d118cc546107f97dc28b449d88274207dd28872bd668c86216e4f6"},
{file = "safetensors-0.4.1-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:bdc0d039e44a727824639824090bd8869535f729878fa248addd3dc01db30eae"},
{file = "safetensors-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c1b1d510c7aba71504ece87bf393ea82638df56303e371e5e2cf09d18977dd7"},
{file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bd0afd95c1e497f520e680ea01e0397c0868a3a3030e128438cf6e9e3fcd671"},
{file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f603bdd8deac6726d39f41688ed353c532dd53935234405d79e9eb53f152fbfb"},
{file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d8a85e3e47e0d4eebfaf9a58b40aa94f977a56050cb5598ad5396a9ee7c087c6"},
{file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0ccb5aa0f3be2727117e5631200fbb3a5b3a2b3757545a92647d6dd8be6658f"},
{file = "safetensors-0.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d784938534e255473155e4d9f276ee69eb85455b6af1292172c731409bf9adee"},
{file = "safetensors-0.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a257de175c254d39ccd6a21341cd62eb7373b05c1e618a78096a56a857e0c316"},
{file = "safetensors-0.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6fd80f7794554091836d4d613d33a7d006e2b8d6ba014d06f97cebdfda744f64"},
{file = "safetensors-0.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:35803201d980efcf964b75a0a2aee97fe5e9ecc5f3ad676b38fafdfe98e0620d"},
{file = "safetensors-0.4.1-cp39-none-win32.whl", hash = "sha256:7ff8a36e0396776d3ed9a106fc9a9d7c55d4439ca9a056a24bf66d343041d3e6"},
{file = "safetensors-0.4.1-cp39-none-win_amd64.whl", hash = "sha256:bfa2e20342b81921b98edba52f8deb68843fa9c95250739a56b52ceda5ea5c61"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ae2d5a31cfb8a973a318f7c4d2cffe0bd1fe753cdf7bb41a1939d45a0a06f964"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1a45dbf03e8334d3a5dc93687d98b6dc422f5d04c7d519dac09b84a3c87dd7c6"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2297b359d91126c0f9d4fd17bae3cfa2fe3a048a6971b8db07db746ad92f850c"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda3d98e2bcece388232cfc551ebf063b55bdb98f65ab54df397da30efc7dcc5"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8934bdfd202ebd0697040a3dff40dd77bc4c5bbf3527ede0532f5e7fb4d970f"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:42c3710cec7e5c764c7999697516370bee39067de0aa089b7e2cfb97ac8c6b20"},
{file = "safetensors-0.4.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:53134226053e56bd56e73f7db42596e7908ed79f3c9a1016e4c1dade593ac8e5"},
{file = "safetensors-0.4.1-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:257d59e40a1b367cb544122e7451243d65b33c3f34d822a347f4eea6fdf97fdf"},
{file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d54c2f1826e790d1eb2d2512bfd0ee443f0206b423d6f27095057c7f18a0687"},
{file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645b3f1138fce6e818e79d4128afa28f0657430764cc045419c1d069ff93f732"},
{file = "safetensors-0.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e9a7ffb1e551c6df51d267f5a751f042b183df22690f6feceac8d27364fd51d7"},
{file = "safetensors-0.4.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:44e230fbbe120de564b64f63ef3a8e6ff02840fa02849d9c443d56252a1646d4"},
{file = "safetensors-0.4.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:9d16b3b2fcc6fca012c74bd01b5619c655194d3e3c13e4d4d0e446eefa39a463"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5d95ea4d8b32233910734a904123bdd3979c137c461b905a5ed32511defc075f"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:dab431699b5d45e0ca043bc580651ce9583dda594e62e245b7497adb32e99809"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d8bbb7344e39cb9d4762e85c21df94ebeb03edac923dd94bb9ed8c10eac070"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1faf5111c66a6ba91f85dff2e36edaaf36e6966172703159daeef330de4ddc7b"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:660ca1d8bff6c7bc7c6b30b9b32df74ef3ab668f5df42cefd7588f0d40feadcb"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ae2f67f04ed0bb2e56fd380a8bd3eef03f609df53f88b6f5c7e89c08e52aae00"},
{file = "safetensors-0.4.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8ed5d2c04cdc1afc6b3c28d59580448ac07732c50d94c15e14670f9c473a2ce"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:2b6a2814278b6660261aa9a9aae524616de9f1ec364e3716d219b6ed8f91801f"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3cfd1ca35eacc635f0eaa894e5c5ed83ffebd0f95cac298fd430014fa7323631"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4177b456c6b0c722d82429127b5beebdaf07149d265748e97e0a34ff0b3694c8"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:313e8472197bde54e3ec54a62df184c414582979da8f3916981b6a7954910a1b"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fdb4adb76e21bad318210310590de61c9f4adcef77ee49b4a234f9dc48867869"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1d568628e9c43ca15eb96c217da73737c9ccb07520fafd8a1eba3f2750614105"},
{file = "safetensors-0.4.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:573b6023a55a2f28085fc0a84e196c779b6cbef4d9e73acea14c8094fee7686f"},
{file = "safetensors-0.4.1.tar.gz", hash = "sha256:2304658e6ada81a5223225b4efe84748e760c46079bffedf7e321763cafb36c9"},
]
[package.extras]
@ -7565,13 +7629,13 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam
[[package]]
name = "storage3"
version = "0.6.1"
version = "0.7.0"
description = "Supabase Storage client for Python."
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "storage3-0.6.1-py3-none-any.whl", hash = "sha256:0a8b8dc08f4d2268c8f46035fffcb13be99ed489bd0be29786f979c42f5a7169"},
{file = "storage3-0.6.1.tar.gz", hash = "sha256:7f50c2279da604c3c088fc72f6d10fee146e30fe9ecbf9d505cea5c884622700"},
{file = "storage3-0.7.0-py3-none-any.whl", hash = "sha256:dd2d6e68f7a3dc038047ed62fa8bdc5c2e3d6b6e56ee2951195d084bcce71605"},
{file = "storage3-0.7.0.tar.gz", hash = "sha256:9ddecc775cdc04514413bd44b9ec61bc25aad9faadabefdb6e6e88b33756f5fd"},
]
[package.dependencies]
@ -7597,13 +7661,13 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
[[package]]
name = "supabase"
version = "2.0.3"
version = "2.1.0"
description = "Supabase client for Python."
optional = false
python-versions = ">=3.8,<4.0"
files = [
{file = "supabase-2.0.3-py3-none-any.whl", hash = "sha256:7385c1bd7897d93ba0fb1c5f33496efcbab2264eb44738c4c14284adbfd97099"},
{file = "supabase-2.0.3.tar.gz", hash = "sha256:89b6556bf4f5f2e3dd1255f5840ceb12c3dc187ce8947b9bc0f5f7b0ad010971"},
{file = "supabase-2.1.0-py3-none-any.whl", hash = "sha256:7467261d6b7c3e9b62a396a1718c2c73fb07d51b70e4bb0fe0cde7e029c3b320"},
{file = "supabase-2.1.0.tar.gz", hash = "sha256:44facb425162061176d22ff0baed3055503c54a61cfd84c47eb438df44052ee3"},
]
[package.dependencies]
@ -7611,7 +7675,7 @@ gotrue = ">=1.3.0,<2.0.0"
httpx = ">=0.24.0,<0.25.0"
postgrest = ">=0.10.8,<0.14.0"
realtime = ">=1.0.0,<2.0.0"
storage3 = ">=0.5.3,<0.7.0"
storage3 = ">=0.5.3,<0.8.0"
supafunc = ">=0.3.1,<0.4.0"
[[package]]
@ -8244,13 +8308,13 @@ files = [
[[package]]
name = "types-redis"
version = "4.6.0.10"
version = "4.6.0.11"
description = "Typing stubs for redis"
optional = false
python-versions = ">=3.7"
files = [
{file = "types-redis-4.6.0.10.tar.gz", hash = "sha256:aa7fb5f743534500f274ddf11ab1c910aae1020481865a36b799e1d67de2aaf3"},
{file = "types_redis-4.6.0.10-py3-none-any.whl", hash = "sha256:00f003da884ec3d1d54633186b4cbd587b39782595c5603330cc46a51f9bcf6e"},
{file = "types-redis-4.6.0.11.tar.gz", hash = "sha256:c8cfc84635183deca2db4a528966c5566445fd3713983f0034fb0f5a09e0890d"},
{file = "types_redis-4.6.0.11-py3-none-any.whl", hash = "sha256:94fc61118601fb4f79206b33b9f4344acff7ca1d7bba67834987fb0efcf6a770"},
]
[package.dependencies]
@ -9096,4 +9160,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.11"
content-hash = "62e47482eefda134f0801744360624549d7024861380f51f280f60450d768615"
content-hash = "1930ee9350f8d29899117ba0ffce4eff58b10b99e4d135b8d4ba52bca9e50d88"

View file

@ -102,11 +102,12 @@ boto3 = "^1.28.63"
numexpr = "^2.8.6"
qianfan = "0.0.5"
pgvector = "^0.2.3"
pyautogen = "^0.2.0"
[tool.poetry.group.dev.dependencies]
pytest-asyncio = "^0.21.1"
types-redis = "^4.6.0.5"
ipykernel = "^6.21.2"
ipykernel = "^6.26.0"
mypy = "^1.1.1"
ruff = "^0.1.5"
httpx = "*"

View file

@ -313,7 +313,7 @@ def superuser(
if create_super_user(db=session, username=username, password=password):
# Verify that the superuser was created
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.model import User
user: User = session.query(User).filter(User.username == username).first()
if user is None or not user.is_superuser:

View file

@ -0,0 +1,45 @@
"""Adds Credential table
Revision ID: c1c8e217a069
Revises: 7d2162acc8b2
Create Date: 2023-11-24 10:45:38.465302
"""
from typing import Sequence, Union
import sqlalchemy as sa
import sqlmodel
from alembic import op
# revision identifiers, used by Alembic.
revision: str = '2ac71eb9c3ae'
down_revision: Union[str, None] = '7d2162acc8b2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.create_table('credential',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('value', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('provider', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
sa.Column('user_id', sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column('id', sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
except Exception:
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.drop_table('credential')
except Exception:
pass
# ### end Alembic commands ###

View file

@ -0,0 +1,38 @@
"""Fix Credential table
Revision ID: fd531f8868b1
Revises: 2ac71eb9c3ae
Create Date: 2023-11-24 15:07:37.566516
"""
from typing import Sequence, Union
from alembic import op
# revision identifiers, used by Alembic.
revision: str = 'fd531f8868b1'
down_revision: Union[str, None] = '2ac71eb9c3ae'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('credential', schema=None) as batch_op:
batch_op.create_foreign_key("fk_credential_user_id", 'user', ['user_id'], ['id'])
except Exception:
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table('credential', schema=None) as batch_op:
batch_op.drop_constraint("fk_credential_user_id", type_='foreignkey')
except Exception:
pass
# ### end Alembic commands ###

View file

@ -1,14 +1,16 @@
# Router for base api
from fastapi import APIRouter
from langflow.api.v1 import (
api_key_router,
chat_router,
credentials_router,
endpoints_router,
validate_router,
flows_router,
login_router,
store_router,
users_router,
api_key_router,
login_router,
validate_router,
)
router = APIRouter(
@ -22,3 +24,4 @@ router.include_router(flows_router)
router.include_router(users_router)
router.include_router(api_key_router)
router.include_router(login_router)
router.include_router(credentials_router)

View file

@ -1,11 +1,12 @@
from langflow.api.v1.endpoints import router as endpoints_router
from langflow.api.v1.validate import router as validate_router
from langflow.api.v1.api_key import router as api_key_router
from langflow.api.v1.chat import router as chat_router
from langflow.api.v1.credential import router as credentials_router
from langflow.api.v1.endpoints import router as endpoints_router
from langflow.api.v1.flows import router as flows_router
from langflow.api.v1.login import router as login_router
from langflow.api.v1.store import router as store_router
from langflow.api.v1.users import router as users_router
from langflow.api.v1.api_key import router as api_key_router
from langflow.api.v1.login import router as login_router
from langflow.api.v1.validate import router as validate_router
__all__ = [
"chat_router",
@ -16,4 +17,5 @@ __all__ = [
"users_router",
"api_key_router",
"login_router",
"credentials_router",
]

View file

@ -1,27 +1,27 @@
from typing import TYPE_CHECKING
from uuid import UUID
from fastapi import APIRouter, HTTPException, Depends
from langflow.api.v1.schemas import ApiKeysResponse, ApiKeyCreateRequest
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session
from langflow.api.v1.schemas import ApiKeyCreateRequest, ApiKeysResponse
from langflow.services.auth import utils as auth_utils
from langflow.services.database.models.api_key.api_key import (
ApiKeyCreate,
UnmaskedApiKeyRead,
)
# Assuming you have these methods in your service layer
from langflow.services.database.models.api_key.crud import (
get_api_keys,
create_api_key,
delete_api_key,
get_api_keys,
)
from langflow.services.database.models.user.user import User
from langflow.services.database.models.api_key.model import (
ApiKeyCreate,
UnmaskedApiKeyRead,
)
from langflow.services.database.models.user.model import User
from langflow.services.deps import (
get_session,
get_settings_service,
)
from typing import TYPE_CHECKING
from sqlmodel import Session
if TYPE_CHECKING:
pass
@ -85,3 +85,16 @@ def save_store_api_key(
return {"detail": "API Key saved"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e)) from e
@router.delete("/store")
def delete_store_api_key(
current_user: User = Depends(auth_utils.get_current_active_user),
db: Session = Depends(get_session),
):
try:
current_user.store_api_key = None
db.commit()
return {"detail": "API Key deleted"}
except Exception as e:
raise HTTPException(status_code=400, detail=str(e)) from e

View file

@ -1,27 +1,15 @@
from fastapi import (
APIRouter,
Depends,
HTTPException,
Query,
WebSocket,
WebSocketException,
status,
)
from fastapi import APIRouter, Depends, HTTPException, Query, WebSocket, WebSocketException, status
from fastapi.responses import StreamingResponse
from loguru import logger
from sqlmodel import Session
from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import (
get_current_active_user,
get_current_user_by_jwt,
)
from langflow.services.auth.utils import get_current_active_user, get_current_user_by_jwt
from langflow.services.cache.service import BaseCacheService
from langflow.services.cache.utils import update_build_status
from langflow.services.chat.service import ChatService
from langflow.services.deps import get_cache_service, get_chat_service, get_session
from loguru import logger
from sqlmodel import Session
router = APIRouter(tags=["Chat"])
@ -148,7 +136,7 @@ async def stream_build(
# Some error could happen when building the graph
graph = Graph.from_payload(graph_data)
number_of_nodes = len(graph.nodes)
number_of_nodes = len(graph.vertices)
update_build_status(cache_service, flow_id, BuildStatus.IN_PROGRESS)
try:

View file

@ -0,0 +1,90 @@
from datetime import datetime
from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException
from langflow.services.auth import utils as auth_utils
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.credential import Credential, CredentialCreate, CredentialRead, CredentialUpdate
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_settings_service
from sqlmodel import Session
router = APIRouter(prefix="/credentials", tags=["Credentials"])
@router.post("/", response_model=CredentialRead, status_code=201)
def create_credential(
*,
session: Session = Depends(get_session),
credential: CredentialCreate,
current_user: User = Depends(get_current_active_user),
settings_service=Depends(get_settings_service),
):
"""Create a new credential."""
try:
# check if credential name already exists
credential_exists = (
session.query(Credential)
.filter(Credential.name == credential.name, Credential.user_id == current_user.id)
.first()
)
if credential_exists:
raise HTTPException(status_code=400, detail="Credential name already exists")
db_credential = Credential.model_validate(credential, from_attributes=True)
if not db_credential.value:
raise HTTPException(status_code=400, detail="Credential value cannot be empty")
encrypted = auth_utils.encrypt_api_key(db_credential.value, settings_service=settings_service)
db_credential.value = encrypted
db_credential.user_id = current_user.id
session.add(db_credential)
session.commit()
session.refresh(db_credential)
return db_credential
except Exception as e:
if isinstance(e, HTTPException):
raise e
raise HTTPException(status_code=500, detail=str(e)) from e
@router.get("/", response_model=list[CredentialRead], status_code=200)
def read_credentials(
*,
session: Session = Depends(get_session),
current_user: User = Depends(get_current_active_user),
):
"""Read all credentials."""
try:
credentials = session.query(Credential).filter(Credential.user_id == current_user.id).all()
return credentials
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e
@router.patch("/{credential_id}", response_model=CredentialRead, status_code=200)
def update_credential(
*,
session: Session = Depends(get_session),
credential_id: UUID,
credential: CredentialUpdate,
current_user: User = Depends(get_current_active_user),
):
"""Update a credential."""
try:
db_credential = (
session.query(Credential)
.filter(Credential.id == credential_id, Credential.user_id == current_user.id)
.first()
)
if not db_credential:
raise HTTPException(status_code=404, detail="Credential not found")
credential_data = credential.model_dump(exclude_unset=True)
for key, value in credential_data.items():
setattr(db_credential, key, value)
db_credential.updated_at = datetime.utcnow()
session.commit()
session.refresh(db_credential)
return db_credential
except Exception as e:
raise HTTPException(status_code=500, detail=str(e)) from e

View file

@ -3,8 +3,6 @@ from typing import Annotated, Optional, Union
import sqlalchemy as sa
from fastapi import APIRouter, Body, Depends, HTTPException, UploadFile, status
from loguru import logger
from langflow.api.v1.schemas import (
CustomComponentCode,
ProcessResponse,
@ -14,17 +12,14 @@ from langflow.api.v1.schemas import (
)
from langflow.interface.custom.custom_component import CustomComponent
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.interface.types import build_langchain_template_custom_component, create_and_validate_component
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.services.auth.utils import api_key_security, get_current_active_user
from langflow.services.cache.utils import save_uploaded_file
from langflow.services.database.models.flow import Flow
from langflow.services.database.models.user.user import User
from langflow.services.deps import (
get_session,
get_session_service,
get_settings_service,
get_task_service,
)
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_session_service, get_settings_service, get_task_service
from loguru import logger
try:
from langflow.worker import process_graph_cached_task
@ -34,9 +29,8 @@ except ImportError:
raise NotImplementedError("Celery is not installed")
from sqlmodel import Session
from langflow.services.task.service import TaskService
from sqlmodel import Session
# build router
router = APIRouter(tags=["Base"])
@ -208,9 +202,7 @@ async def custom_component(
raw_code: CustomComponentCode,
user: User = Depends(get_current_active_user),
):
from langflow.interface.types import (
build_langchain_template_custom_component,
)
component = create_and_validate_component(raw_code.code)
extractor = CustomComponent(code=raw_code.code)
extractor.validate()
@ -219,10 +211,8 @@ async def custom_component(
@router.post("/custom_component/reload", status_code=HTTPStatus.OK)
async def reload_custom_component(path: str):
from langflow.interface.types import (
build_langchain_template_custom_component,
)
async def reload_custom_component(path: str, user: User = Depends(get_current_active_user)):
from langflow.interface.types import build_langchain_template_custom_component
try:
reader = DirectoryReader("")
@ -235,3 +225,15 @@ async def reload_custom_component(path: str):
return build_langchain_template_custom_component(extractor, user_id=user.id)
except Exception as exc:
raise HTTPException(status_code=400, detail=str(exc))
@router.post("/custom_component/update", status_code=HTTPStatus.OK)
async def custom_component_update(
raw_code: CustomComponentCode,
user: User = Depends(get_current_active_user),
):
component = create_and_validate_component(raw_code.code)
component_node = build_langchain_template_custom_component(component, user_id=user.id, update_field=raw_code.field)
# Update the field
return component_node

View file

@ -11,7 +11,7 @@ from langflow.api.utils import remove_api_keys
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
from langflow.services.auth.utils import get_current_active_user
from langflow.services.database.models.flow import Flow, FlowCreate, FlowRead, FlowUpdate
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_settings_service
# build router

View file

@ -2,13 +2,14 @@ from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from uuid import UUID
from langflow.services.database.models.api_key.api_key import ApiKeyRead
from langflow.services.database.models.flow import FlowCreate, FlowRead
from langflow.services.database.models.user import UserRead
from langflow.services.database.models.base import orjson_dumps
from pydantic import BaseModel, Field, field_validator
from langflow.services.database.models.api_key.model import ApiKeyRead
from langflow.services.database.models.base import orjson_dumps
from langflow.services.database.models.flow import FlowCreate, FlowRead
from langflow.services.database.models.user import UserRead
class BuildStatus(Enum):
"""Status of the build."""
@ -156,6 +157,7 @@ class StreamData(BaseModel):
class CustomComponentCode(BaseModel):
code: str
field: Optional[str] = None
class CustomComponentResponseError(BaseModel):

View file

@ -4,7 +4,7 @@ from uuid import UUID
from fastapi import APIRouter, Depends, HTTPException, Query
from langflow.services.auth import utils as auth_utils
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_settings_service, get_store_service
from langflow.services.store.exceptions import CustomException
from langflow.services.store.schema import (

View file

@ -0,0 +1,37 @@
from typing import Callable, List, Union
from langchain.agents import AgentExecutor, AgentType, initialize_agent, types
from langflow import CustomComponent
from langflow.field_typing import BaseChatMemory, BaseLanguageModel, Tool
class AgentInitializerComponent(CustomComponent):
display_name: str = "Agent Initializer"
description: str = "Initialize a Langchain Agent."
documentation: str = "https://python.langchain.com/docs/modules/agents/agent_types/"
def build_config(self):
agents = list(types.AGENT_TO_CLASS.keys())
# field_type and required are optional
return {
"agent": {"options": agents, "value": agents[0], "display_name": "Agent Type"},
"max_iterations": {"display_name": "Max Iterations", "value": 10},
"memory": {"display_name": "Memory"},
"tools": {"display_name": "Tools"},
"llm": {"display_name": "Language Model"},
}
def build(
self, agent: str, llm: BaseLanguageModel, memory: BaseChatMemory, tools: List[Tool], max_iterations: int
) -> Union[AgentExecutor, Callable]:
agent = AgentType(agent)
return initialize_agent(
tools=tools,
llm=llm,
agent=agent,
memory=memory,
return_intermediate_steps=True,
handle_parsing_errors=True,
max_iterations=max_iterations,
)

View file

@ -1,10 +1,12 @@
from langflow import CustomComponent
from typing import Callable, Optional, Union
from langchain.chains import LLMChain
from typing import Optional, Union, Callable
from langflow import CustomComponent
from langflow.field_typing import (
BasePromptTemplate,
BaseLanguageModel,
BaseMemory,
BasePromptTemplate,
Chain,
)

View file

@ -5,8 +5,6 @@ agents:
documentation: "https://python.langchain.com/docs/modules/agents/toolkits/openapi"
CSVAgent:
documentation: "https://python.langchain.com/docs/modules/agents/toolkits/csv"
AgentInitializer:
documentation: "https://python.langchain.com/docs/modules/agents/agent_types/"
VectorStoreAgent:
documentation: ""
VectorStoreRouterAgent:

View file

@ -3,11 +3,12 @@ from typing import Callable, Dict, Union
from langchain.agents.agent import AgentExecutor
from langchain.chains.base import Chain
from langchain.document_loaders.base import BaseLoader
from langchain.llms.base import BaseLanguageModel, BaseLLM
from langchain.llms.base import BaseLLM
from langchain.memory.chat_memory import BaseChatMemory
from langchain.prompts import BasePromptTemplate, ChatPromptTemplate, PromptTemplate
from langchain.schema import BaseOutputParser, BaseRetriever, Document
from langchain.schema.embeddings import Embeddings
from langchain.schema.language_model import BaseLanguageModel
from langchain.schema.memory import BaseMemory
from langchain.text_splitter import TextSplitter
from langchain.tools import Tool

View file

@ -1,7 +1,7 @@
from typing import TYPE_CHECKING, List, Optional
from loguru import logger
from typing import TYPE_CHECKING
from pydantic import BaseModel, Field
from typing import List, Optional
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
@ -22,8 +22,8 @@ class TargetHandle(BaseModel):
class Edge:
def __init__(self, source: "Vertex", target: "Vertex", edge: dict):
self.source: "Vertex" = source
self.target: "Vertex" = target
self.source_id: str = source.id
self.target_id: str = target.id
if data := edge.get("data", {}):
self._source_handle = data.get("sourceHandle", {})
self._target_handle = data.get("targetHandle", {})
@ -31,7 +31,7 @@ class Edge:
self.target_handle: TargetHandle = TargetHandle(**self._target_handle)
self.target_param = self.target_handle.fieldName
# validate handles
self.validate_handles()
self.validate_handles(source, target)
else:
# Logging here because this is a breaking change
logger.error("Edge data is empty")
@ -41,9 +41,9 @@ class Edge:
# target_param is documents
self.target_param = self._target_handle.split("|")[1]
# Validate in __init__ to fail fast
self.validate_edge()
self.validate_edge(source, target)
def validate_handles(self) -> None:
def validate_handles(self, source, target) -> None:
if self.target_handle.inputTypes is None:
self.valid_handles = self.target_handle.type in self.source_handle.baseClasses
else:
@ -54,26 +54,20 @@ class Edge:
if not self.valid_handles:
logger.debug(self.source_handle)
logger.debug(self.target_handle)
raise ValueError(
f"Edge between {self.source.vertex_type} and {self.target.vertex_type} " f"has invalid handles"
)
raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has invalid handles")
def __setstate__(self, state):
self.source = state["source"]
self.target = state["target"]
self.source_id = state["source_id"]
self.target_id = state["target_id"]
self.target_param = state["target_param"]
self.source_handle = state.get("source_handle")
self.target_handle = state.get("target_handle")
def reset(self) -> None:
self.source._build_params()
self.target._build_params()
def validate_edge(self) -> None:
def validate_edge(self, source, target) -> None:
# Validate that the outputs of the source node are valid inputs
# for the target node
self.source_types = self.source.output
self.target_reqs = self.target.required_inputs + self.target.optional_inputs
self.source_types = source.output
self.target_reqs = target.required_inputs + target.optional_inputs
# Both lists contain strings and sometimes a string contains the value we are
# looking for e.g. comgin_out=["Chain"] and target_reqs=["LLMChain"]
# so we need to check if any of the strings in source_types is in target_reqs
@ -88,13 +82,11 @@ class Edge:
if no_matched_type:
logger.debug(self.source_types)
logger.debug(self.target_reqs)
raise ValueError(
f"Edge between {self.source.vertex_type} and {self.target.vertex_type} " f"has no matched type"
)
raise ValueError(f"Edge between {source.vertex_type} and {target.vertex_type} " f"has no matched type")
def __repr__(self) -> str:
return (
f"Edge(source={self.source.id}, target={self.target.id}, target_param={self.target_param}"
f"Edge(source={self.source_id}, target={self.target_id}, target_param={self.target_param}"
f", matched_type={self.matched_type})"
)

View file

@ -13,32 +13,32 @@ from langflow.utils import payload
class Graph:
"""A class representing a graph of nodes and edges."""
"""A class representing a graph of vertices and edges."""
def __init__(
self,
nodes: List[Dict],
edges: List[Dict[str, str]],
) -> None:
self._nodes = nodes
self._vertices = nodes
self._edges = edges
self.raw_graph_data = {"nodes": nodes, "edges": edges}
self.top_level_nodes = []
for node in self._nodes:
if node_id := node.get("id"):
self.top_level_nodes.append(node_id)
self.top_level_vertices = []
for vertex in self._vertices:
if vertex_id := vertex.get("id"):
self.top_level_vertices.append(vertex_id)
self._graph_data = process_flow(self.raw_graph_data)
self._nodes = self._graph_data["nodes"]
self._vertices = self._graph_data["nodes"]
self._edges = self._graph_data["edges"]
self._build_graph()
def __getstate__(self):
return self.raw_graph_data
def __setstate__(self, state):
self.__dict__.update(state)
for edge in self.edges:
edge.reset()
edge.validate_edge()
self.__init__(**state)
@classmethod
def from_payload(cls, payload: Dict) -> "Graph":
@ -54,9 +54,9 @@ class Graph:
if "data" in payload:
payload = payload["data"]
try:
nodes = payload["nodes"]
vertices = payload["nodes"]
edges = payload["edges"]
return cls(nodes, edges)
return cls(vertices, edges)
except KeyError as exc:
logger.exception(exc)
raise ValueError(
@ -69,61 +69,69 @@ class Graph:
return self.__repr__() == other.__repr__()
def _build_graph(self) -> None:
"""Builds the graph from the nodes and edges."""
self.nodes = self._build_vertices()
"""Builds the graph from the vertices and edges."""
self.vertices = self._build_vertices()
self.vertex_ids = [vertex.id for vertex in self.vertices]
self.edges = self._build_edges()
for edge in self.edges:
edge.source.add_edge(edge)
edge.target.add_edge(edge)
# This is a hack to make sure that the LLM node is sent to
# the toolkit node
self._build_node_params()
# remove invalid nodes
self._validate_nodes()
# This is a hack to make sure that the LLM vertex is sent to
# the toolkit vertex
self._build_vertex_params()
# remove invalid vertices
self._validate_vertices()
def _build_node_params(self) -> None:
"""Identifies and handles the LLM node within the graph."""
llm_node = None
for node in self.nodes:
node._build_params()
if isinstance(node, LLMVertex):
llm_node = node
def _build_vertex_params(self) -> None:
"""Identifies and handles the LLM vertex within the graph."""
llm_vertex = None
for vertex in self.vertices:
vertex._build_params()
if isinstance(vertex, LLMVertex):
llm_vertex = vertex
if llm_node:
for node in self.nodes:
if isinstance(node, ToolkitVertex):
node.params["llm"] = llm_node
if llm_vertex:
for vertex in self.vertices:
if isinstance(vertex, ToolkitVertex):
vertex.params["llm"] = llm_vertex
def _validate_nodes(self) -> None:
"""Check that all nodes have edges"""
if len(self.nodes) == 1:
def _validate_vertices(self) -> None:
"""Check that all vertices have edges"""
if len(self.vertices) == 1:
return
for node in self.nodes:
if not self._validate_node(node):
raise ValueError(f"{node.vertex_type} is not connected to any other components")
for vertex in self.vertices:
if not self._validate_vertex(vertex):
raise ValueError(f"{vertex.vertex_type} is not connected to any other components")
def _validate_node(self, node: Vertex) -> bool:
"""Validates a node."""
# All nodes that do not have edges are invalid
return len(node.edges) > 0
def _validate_vertex(self, vertex: Vertex) -> bool:
"""Validates a vertex."""
# All vertices that do not have edges are invalid
return len(self.get_vertex_edges(vertex.id)) > 0
def get_node(self, node_id: str) -> Union[None, Vertex]:
"""Returns a node by id."""
return next((node for node in self.nodes if node.id == node_id), None)
def get_vertex(self, vertex_id: str) -> Union[None, Vertex]:
"""Returns a vertex by id."""
return next((vertex for vertex in self.vertices if vertex.id == vertex_id), None)
def get_nodes_with_target(self, node: Vertex) -> List[Vertex]:
"""Returns the nodes connected to a node."""
connected_nodes: List[Vertex] = [edge.source for edge in self.edges if edge.target == node]
return connected_nodes
def get_vertex_edges(self, vertex_id: str) -> List[Edge]:
"""Returns a list of edges for a given vertex."""
return [edge for edge in self.edges if edge.source_id == vertex_id or edge.target_id == vertex_id]
def get_vertices_with_target(self, vertex_id: str) -> List[Vertex]:
"""Returns the vertices connected to a vertex."""
vertices: List[Vertex] = []
for edge in self.edges:
if edge.target_id == vertex_id:
vertex = self.get_vertex(edge.source_id)
if vertex is None:
continue
vertices.append(vertex)
return vertices
async def build(self) -> Chain:
"""Builds the graph."""
# Get root node
root_node = payload.get_root_node(self)
if root_node is None:
raise ValueError("No root node found")
return await root_node.build()
# Get root vertex
root_vertex = payload.get_root_vertex(self)
if root_vertex is None:
raise ValueError("No root vertex found")
return await root_vertex.build()
def topological_sort(self) -> List[Vertex]:
"""
@ -136,25 +144,25 @@ class Graph:
ValueError: If the graph contains a cycle.
"""
# States: 0 = unvisited, 1 = visiting, 2 = visited
state = {node: 0 for node in self.nodes}
state = {vertex: 0 for vertex in self.vertices}
sorted_vertices = []
def dfs(node):
if state[node] == 1:
def dfs(vertex):
if state[vertex] == 1:
# We have a cycle
raise ValueError("Graph contains a cycle, cannot perform topological sort")
if state[node] == 0:
state[node] = 1
for edge in node.edges:
if edge.source == node:
if state[vertex] == 0:
state[vertex] = 1
for edge in vertex.edges:
if edge.source == vertex:
dfs(edge.target)
state[node] = 2
sorted_vertices.append(node)
state[vertex] = 2
sorted_vertices.append(vertex)
# Visit each node
for node in self.nodes:
if state[node] == 0:
dfs(node)
# Visit each vertex
for vertex in self.vertices:
if state[vertex] == 0:
dfs(vertex)
return list(reversed(sorted_vertices))
@ -164,17 +172,21 @@ class Graph:
logger.debug("There are %s vertices in the graph", len(sorted_vertices))
yield from sorted_vertices
def get_node_neighbors(self, node: Vertex) -> Dict[Vertex, int]:
"""Returns the neighbors of a node."""
def get_vertex_neighbors(self, vertex: Vertex) -> Dict[Vertex, int]:
"""Returns the neighbors of a vertex."""
neighbors: Dict[Vertex, int] = {}
for edge in self.edges:
if edge.source == node:
neighbor = edge.target
if edge.source_id == vertex.id:
neighbor = self.get_vertex(edge.target_id)
if neighbor is None:
continue
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
elif edge.target == node:
neighbor = edge.source
elif edge.target_id == vertex.id:
neighbor = self.get_vertex(edge.source_id)
if neighbor is None:
continue
if neighbor not in neighbors:
neighbors[neighbor] = 0
neighbors[neighbor] += 1
@ -182,59 +194,59 @@ class Graph:
def _build_edges(self) -> List[Edge]:
"""Builds the edges of the graph."""
# Edge takes two nodes as arguments, so we need to build the nodes first
# Edge takes two vertices as arguments, so we need to build the vertices first
# and then build the edges
# if we can't find a node, we raise an error
# if we can't find a vertex, we raise an error
edges: List[Edge] = []
for edge in self._edges:
source = self.get_node(edge["source"])
target = self.get_node(edge["target"])
source = self.get_vertex(edge["source"])
target = self.get_vertex(edge["target"])
if source is None:
raise ValueError(f"Source node {edge['source']} not found")
raise ValueError(f"Source vertex {edge['source']} not found")
if target is None:
raise ValueError(f"Target node {edge['target']} not found")
raise ValueError(f"Target vertex {edge['target']} not found")
edges.append(Edge(source, target, edge))
return edges
def _get_vertex_class(self, node_type: str, node_lc_type: str) -> Type[Vertex]:
"""Returns the node class based on the node type."""
if node_type in FILE_TOOLS:
def _get_vertex_class(self, vertex_type: str, vertex_lc_type: str) -> Type[Vertex]:
"""Returns the vertex class based on the vertex type."""
if vertex_type in FILE_TOOLS:
return FileToolVertex
if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type]
if vertex_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP:
return lazy_load_vertex_dict.VERTEX_TYPE_MAP[vertex_type]
return (
lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_lc_type]
if node_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP
lazy_load_vertex_dict.VERTEX_TYPE_MAP[vertex_lc_type]
if vertex_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP
else Vertex
)
def _build_vertices(self) -> List[Vertex]:
"""Builds the vertices of the graph."""
nodes: List[Vertex] = []
for node in self._nodes:
node_data = node["data"]
node_type: str = node_data["type"] # type: ignore
node_lc_type: str = node_data["node"]["template"]["_type"] # type: ignore
vertices: List[Vertex] = []
for vertex in self._vertices:
vertex_data = vertex["data"]
vertex_type: str = vertex_data["type"] # type: ignore
vertex_lc_type: str = vertex_data["node"]["template"]["_type"] # type: ignore
VertexClass = self._get_vertex_class(node_type, node_lc_type)
vertex = VertexClass(node)
vertex.set_top_level(self.top_level_nodes)
nodes.append(vertex)
VertexClass = self._get_vertex_class(vertex_type, vertex_lc_type)
vertex = VertexClass(vertex, graph=self)
vertex.set_top_level(self.top_level_vertices)
vertices.append(vertex)
return nodes
return vertices
def get_children_by_node_type(self, node: Vertex, node_type: str) -> List[Vertex]:
"""Returns the children of a node based on the node type."""
def get_children_by_vertex_type(self, vertex: Vertex, vertex_type: str) -> List[Vertex]:
"""Returns the children of a vertex based on the vertex type."""
children = []
node_types = [node.data["type"]]
if "node" in node.data:
node_types += node.data["node"]["base_classes"]
if node_type in node_types:
children.append(node)
vertex_types = [vertex.data["type"]]
if "node" in vertex.data:
vertex_types += vertex.data["node"]["base_classes"]
if vertex_type in vertex_types:
children.append(vertex)
return children
def __repr__(self):
node_ids = [node.id for node in self.nodes]
edges_repr = "\n".join([f"{edge.source.id} --> {edge.target.id}" for edge in self.edges])
return f"Graph:\nNodes: {node_ids}\nConnections:\n{edges_repr}"
vertex_ids = [vertex.id for vertex in self.vertices]
edges_repr = "\n".join([f"{edge.source_id} --> {edge.target_id}" for edge in self.edges])
return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}"

View file

@ -1,33 +1,32 @@
import ast
import inspect
import pickle
import types
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from loguru import logger
from langflow.graph.utils import UnbuiltObject
from langflow.graph.vertex.utils import is_basic_type
from langflow.interface.initialize import loading
from langflow.interface.listing import lazy_load_dict
from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.util import sync_to_async
from loguru import logger
if TYPE_CHECKING:
from langflow.graph.edge.base import Edge
from langflow.graph.graph.base import Graph
class Vertex:
def __init__(
self,
data: Dict,
graph: "Graph",
base_type: Optional[str] = None,
is_task: bool = False,
params: Optional[Dict] = None,
) -> None:
self.graph = graph
self.id: str = data["id"]
self._data = data
self.edges: List["Edge"] = []
self.base_type: Optional[str] = base_type
self._parse_data()
self._built_object = UnbuiltObject()
@ -39,43 +38,28 @@ class Vertex:
self.parent_node_id: Optional[str] = self._data.get("parent_node_id")
self.parent_is_top_level = False
def reset_params(self):
for edge in self.edges:
if edge.source != self:
target_param = edge.target_param
if target_param in ["document", "texts"]:
# this means they got data and have already ingested it
# so we continue after removing the param
self.params.pop(target_param, None)
continue
if target_param in self.params and not is_basic_type(self.params[target_param]):
# edge.source.params = {}
edge.source._build_params()
edge.source._built_object = UnbuiltObject()
edge.source._built = False
self.params[target_param] = edge.source
@property
def edges(self) -> List["Edge"]:
return self.graph.get_vertex_edges(self.id)
def __getstate__(self):
state_dict = self.__dict__.copy()
try:
# try pickling the built object
# if it fails, then we need to delete it
# and build it again
pickle.dumps(state_dict["_built_object"])
except Exception:
self.reset_params()
del state_dict["_built_object"]
del state_dict["_built"]
return state_dict
return {
"_data": self._data,
"params": {},
"base_type": self.base_type,
"is_task": self.is_task,
"id": self.id,
"_built_object": UnbuiltObject(),
"_built": False,
"parent_node_id": self.parent_node_id,
"parent_is_top_level": self.parent_is_top_level,
}
def __setstate__(self, state):
self._data = state["_data"]
self.params = state["params"]
self.base_type = state["base_type"]
self.is_task = state["is_task"]
self.edges = state["edges"]
self.id = state["id"]
self._parse_data()
if "_built_object" in state:
@ -144,6 +128,10 @@ class Vertex:
# and use that as the value for the param
# If the type is "str", then we need to get the value of the "value" key
# and use that as the value for the param
if self.graph is None:
raise ValueError("Graph not found")
template_dict = {key: value for key, value in self.data["node"]["template"].items() if isinstance(value, dict)}
params = self.params.copy() if self.params else {}
@ -155,9 +143,9 @@ class Vertex:
if template_dict[param_key]["list"]:
if param_key not in params:
params[param_key] = []
params[param_key].append(edge.source)
elif edge.target.id == self.id:
params[param_key] = edge.source
params[param_key].append(self.graph.get_vertex(edge.source_id))
elif edge.target_id == self.id:
params[param_key] = self.graph.get_vertex(edge.source_id)
for key, value in template_dict.items():
if key in params:
@ -177,33 +165,33 @@ class Vertex:
else:
raise ValueError(f"File path not found for {self.vertex_type}")
elif value.get("type") in DIRECT_TYPES and params.get(key) is None:
val = value.get("value")
if value.get("type") == "code":
try:
params[key] = ast.literal_eval(value.get("value"))
params[key] = ast.literal_eval(val) if val else None
except Exception as exc:
logger.debug(f"Error parsing code: {exc}")
params[key] = value.get("value")
params[key] = val
elif value.get("type") in ["dict", "NestedDict"]:
# When dict comes from the frontend it comes as a
# list of dicts, so we need to convert it to a dict
# before passing it to the build method
_value = value.get("value")
if isinstance(_value, list):
if isinstance(val, list):
params[key] = {k: v for item in value.get("value", []) for k, v in item.items()}
elif isinstance(_value, dict):
params[key] = _value
elif value.get("type") == "int" and value.get("value") is not None:
elif isinstance(val, dict):
params[key] = val
elif value.get("type") == "int" and val is not None:
try:
params[key] = int(value.get("value"))
params[key] = int(val)
except ValueError:
params[key] = value.get("value")
elif value.get("type") == "float" and value.get("value") is not None:
params[key] = val
elif value.get("type") == "float" and val is not None:
try:
params[key] = float(value.get("value"))
params[key] = float(val)
except ValueError:
params[key] = value.get("value")
params[key] = val
else:
params[key] = value.get("value")
params[key] = val
if not value.get("required") and params.get(key) is None:
if value.get("default"):
@ -266,7 +254,7 @@ class Vertex:
pass
# If there's no task_id, build the vertex locally
await self.build(user_id)
await self.build(user_id=user_id)
return self._built_object
async def _build_node_and_update_params(self, key, node, user_id=None):

View file

@ -1,14 +1,14 @@
import ast
from typing import Any, Dict, List, Optional, Union
from langflow.graph.utils import flatten_list
from langflow.graph.utils import UnbuiltObject, flatten_list
from langflow.graph.vertex.base import Vertex
from langflow.interface.utils import extract_input_variables_from_prompt
class AgentVertex(Vertex):
def __init__(self, data: Dict, params: Optional[Dict] = None):
super().__init__(data, base_type="agents", params=params)
def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
super().__init__(data, graph=graph, base_type="agents", params=params)
self.tools: List[Union[ToolkitVertex, ToolVertex]] = []
self.chains: List[ChainVertex] = []
@ -28,7 +28,7 @@ class AgentVertex(Vertex):
for edge in self.edges:
if not hasattr(edge, "source"):
continue
source_node = edge.source
source_node = self.graph.get_vertex(edge.source_id)
if isinstance(source_node, (ToolVertex, ToolkitVertex)):
self.tools.append(source_node)
elif isinstance(source_node, ChainVertex):
@ -51,16 +51,21 @@ class AgentVertex(Vertex):
class ToolVertex(Vertex):
def __init__(self, data: Dict, params: Optional[Dict] = None):
super().__init__(data, base_type="tools", params=params)
def __init__(
self,
data: Dict,
graph,
params: Optional[Dict] = None,
):
super().__init__(data, graph=graph, base_type="tools", params=params)
class LLMVertex(Vertex):
built_node_type = None
class_built_object = None
def __init__(self, data: Dict, params: Optional[Dict] = None):
super().__init__(data, base_type="llms", params=params)
def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
super().__init__(data, graph=graph, base_type="llms", params=params)
async def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
# LLM is different because some models might take up too much memory
@ -77,18 +82,18 @@ class LLMVertex(Vertex):
class ToolkitVertex(Vertex):
def __init__(self, data: Dict, params=None):
super().__init__(data, base_type="toolkits", params=params)
def __init__(self, data: Dict, graph, params=None):
super().__init__(data, graph=graph, base_type="toolkits", params=params)
class FileToolVertex(ToolVertex):
def __init__(self, data: Dict, params=None):
super().__init__(data, params=params)
def __init__(self, data: Dict, graph, params=None):
super().__init__(data, graph=graph, params=params)
class WrapperVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="wrappers")
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="wrappers")
async def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
if not self._built or force:
@ -99,14 +104,14 @@ class WrapperVertex(Vertex):
class DocumentLoaderVertex(Vertex):
def __init__(self, data: Dict, params: Optional[Dict] = None):
super().__init__(data, base_type="documentloaders", params=params)
def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
super().__init__(data, graph=graph, base_type="documentloaders", params=params)
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
if self._built_object:
if self._built_object and not isinstance(self._built_object, UnbuiltObject):
avg_length = sum(len(doc.page_content) for doc in self._built_object if hasattr(doc, "page_content")) / len(
self._built_object
)
@ -117,28 +122,19 @@ class DocumentLoaderVertex(Vertex):
class EmbeddingVertex(Vertex):
def __init__(self, data: Dict, params: Optional[Dict] = None):
super().__init__(data, base_type="embeddings", params=params)
def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
super().__init__(data, graph=graph, base_type="embeddings", params=params)
class VectorStoreVertex(Vertex):
def __init__(self, data: Dict, params=None):
super().__init__(data, base_type="vectorstores")
def __init__(self, data: Dict, graph, params=None):
super().__init__(data, graph=graph, base_type="vectorstores")
self.params = params or {}
# VectorStores may contain databse connections
# so we need to define the __reduce__ method and the __setstate__ method
# to avoid pickling errors
def clean_edges_for_pickling(self):
# for each edge that has self as source
# we need to clear the _built_object of the target
# so that we don't try to pickle a database connection
for edge in self.edges:
if edge.source == self:
edge.target._built_object = None
edge.target._built = False
edge.target.params[edge.target_param] = self
def remove_docs_and_texts_from_params(self):
# remove documents and texts from params
@ -146,17 +142,16 @@ class VectorStoreVertex(Vertex):
self.params.pop("documents", None)
self.params.pop("texts", None)
def __getstate__(self):
# We want to save the params attribute
# and if "documents" or "texts" are in the params
# we want to remove them because they have already
# been processed.
params = self.params.copy()
params.pop("documents", None)
params.pop("texts", None)
self.clean_edges_for_pickling()
# def __getstate__(self):
# # We want to save the params attribute
# # and if "documents" or "texts" are in the params
# # we want to remove them because they have already
# # been processed.
# params = self.params.copy()
# params.pop("documents", None)
# params.pop("texts", None)
return super().__getstate__()
# return super().__getstate__()
def __setstate__(self, state):
super().__setstate__(state)
@ -164,24 +159,24 @@ class VectorStoreVertex(Vertex):
class MemoryVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="memory")
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="memory")
class RetrieverVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="retrievers")
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="retrievers")
class TextSplitterVertex(Vertex):
def __init__(self, data: Dict, params: Optional[Dict] = None):
super().__init__(data, base_type="textsplitters", params=params)
def __init__(self, data: Dict, graph, params: Optional[Dict] = None):
super().__init__(data, graph=graph, base_type="textsplitters", params=params)
def _built_object_repr(self):
# This built_object is a list of documents. Maybe we should
# show how many documents are in the list?
if self._built_object:
if self._built_object and not isinstance(self._built_object, UnbuiltObject):
avg_length = sum(len(doc.page_content) for doc in self._built_object) / len(self._built_object)
return f"""{self.vertex_type}({len(self._built_object)} documents)
\nAvg. Document Length (characters): {int(avg_length)}
@ -190,8 +185,8 @@ class TextSplitterVertex(Vertex):
class ChainVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="chains")
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="chains")
async def build(
self,
@ -220,8 +215,8 @@ class ChainVertex(Vertex):
class PromptVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="prompts")
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="prompts")
async def build(
self,
@ -271,9 +266,13 @@ class PromptVertex(Vertex):
# so the prompt format doesn't break
artifacts.pop("handle_keys", None)
try:
if not hasattr(self._built_object, "template") and hasattr(self._built_object, "prompt"):
if (
not hasattr(self._built_object, "template")
and hasattr(self._built_object, "prompt")
and not isinstance(self._built_object, UnbuiltObject)
):
template = self._built_object.prompt.template
else:
elif not isinstance(self._built_object, UnbuiltObject) and hasattr(self._built_object, "template"):
template = self._built_object.template
for key, value in artifacts.items():
if value:
@ -285,13 +284,13 @@ class PromptVertex(Vertex):
class OutputParserVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="output_parsers")
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="output_parsers")
class CustomComponentVertex(Vertex):
def __init__(self, data: Dict):
super().__init__(data, base_type="custom_components", is_task=True)
def __init__(self, data: Dict, graph):
super().__init__(data, graph=graph, base_type="custom_components", is_task=True)
def _built_object_repr(self):
if self.task_id and self.is_task:

View file

@ -1,9 +1,11 @@
import ast
import operator
import warnings
from typing import Any, ClassVar, Optional
from cachetools import TTLCache, cachedmethod
from fastapi import HTTPException
from langflow.interface.custom.code_parser import CodeParser
from langflow.utils import validate
@ -23,11 +25,20 @@ class Component:
code: Optional[str] = None
_function_entrypoint_name: str = "build"
field_config: dict = {}
_user_id: Optional[str]
def __init__(self, **data):
self.cache = TTLCache(maxsize=1024, ttl=60)
for key, value in data.items():
setattr(self, key, value)
if key == "user_id":
setattr(self, "_user_id", value)
else:
setattr(self, key, value)
def __setattr__(self, key, value):
if key == "_user_id" and hasattr(self, "_user_id"):
warnings.warn("user_id is immutable and cannot be changed.")
super().__setattr__(key, value)
@cachedmethod(cache=operator.attrgetter("cache"))
def get_code_tree(self, code: str):

View file

@ -5,7 +5,6 @@ from uuid import UUID
import yaml
from cachetools import TTLCache, cachedmethod
from fastapi import HTTPException
from langflow.field_typing.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
from langflow.interface.custom.component import Component
from langflow.interface.custom.directory_reader import DirectoryReader
@ -15,7 +14,7 @@ from langflow.interface.custom.utils import (
)
from langflow.services.database.models.flow import Flow
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
from langflow.services.deps import get_credential_service, get_db_service
from langflow.utils import validate
@ -184,6 +183,37 @@ class CustomComponent(Component):
return super().build_template_config(attributes)
@property
def keys(self):
def get_credential(name: str):
if hasattr(self, "_user_id") and not self._user_id:
raise ValueError(f"User id is not set for {self.__class__.__name__}")
credential_service = get_credential_service() # Get service instance
# Retrieve and decrypt the credential by name for the current user
db_service = get_db_service()
with session_getter(db_service) as session:
return credential_service.get_credential(user_id=self._user_id, name=name, session=session)
return get_credential
def list_key_names(self):
if hasattr(self, "_user_id") and not self._user_id:
raise ValueError(f"User id is not set for {self.__class__.__name__}")
credential_service = get_credential_service()
db_service = get_db_service()
with session_getter(db_service) as session:
return credential_service.list_credentials(user_id=self._user_id, session=session)
def index(self, value: int = 0):
"""Returns a function that returns the value at the given index in the iterable."""
def get_index(iterable: List[Any]):
if iterable:
return iterable[value]
return iterable
return get_index
@property
def get_function(self):
return validate.create_function(self.code, self.function_entrypoint_name)
@ -201,7 +231,7 @@ class CustomComponent(Component):
return await build_sorted_vertices(graph_data, self.user_id)
def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
if not self.user_id:
if not self._user_id:
raise ValueError("Session is invalid")
try:
get_session = get_session or session_getter

View file

@ -3,13 +3,11 @@ import contextlib
import re
import traceback
import warnings
from typing import Any, List, Optional, Union
from typing import Any, Dict, List, Optional, Union
from uuid import UUID
from cachetools import LRUCache, cached
from fastapi import HTTPException
from loguru import logger
from langflow.api.utils import get_new_key
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
@ -35,6 +33,7 @@ from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.constants import CLASSES_TO_REMOVE
from langflow.template.frontend_node.custom_components import CustomComponentFrontendNode
from langflow.utils.util import get_base_classes
from loguru import logger
# Used to get the base_classes list
@ -201,7 +200,9 @@ def update_attributes(frontend_node, template_config):
frontend_node[attribute] = template_config[attribute]
def build_field_config(custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None):
def build_field_config(
custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None, update_field=None
):
"""Build the field configuration for a custom component"""
try:
@ -222,7 +223,19 @@ def build_field_config(custom_component: CustomComponent, user_id: Optional[Unio
) from exc
try:
return custom_class(user_id=user_id).build_config()
build_config: Dict = custom_class(user_id=user_id).build_config()
for field_name, field_dict in build_config.items():
if update_field is not None and field_name != update_field:
continue
try:
update_field_dict(field_dict)
build_config[field_name] = field_dict
except Exception as exc:
logger.error(f"Error while getting build_config: {str(exc)}")
return build_config
except Exception as exc:
logger.error(f"Error while building field config: {str(exc)}")
raise HTTPException(
@ -234,6 +247,18 @@ def build_field_config(custom_component: CustomComponent, user_id: Optional[Unio
) from exc
def update_field_dict(field_dict):
"""Update the field dictionary by calling options() or value() if they are callable"""
if "options" in field_dict and callable(field_dict["options"]):
field_dict["options"] = field_dict["options"]()
# Also update the "refresh" key
field_dict["refresh"] = True
if "value" in field_dict and callable(field_dict["value"]):
field_dict["value"] = field_dict["value"](field_dict.get("options", []))
field_dict["refresh"] = True
def add_extra_fields(frontend_node, field_config, function_args):
"""Add extra fields to the frontend node"""
if not function_args:
@ -314,7 +339,9 @@ def add_output_types(frontend_node, return_types: List[str]):
def build_langchain_template_custom_component(
custom_component: CustomComponent, user_id: Optional[Union[str, UUID]] = None
custom_component: CustomComponent,
user_id: Optional[Union[str, UUID]] = None,
update_field: Optional[str] = None,
):
"""Build a custom component template for the langchain"""
try:
@ -328,7 +355,7 @@ def build_langchain_template_custom_component(
update_attributes(frontend_node, template_config)
logger.debug("Updated attributes")
field_config = build_field_config(custom_component, user_id=user_id)
field_config = build_field_config(custom_component, user_id=user_id, update_field=update_field)
logger.debug("Built field config")
entrypoint_args = custom_component.get_function_entrypoint_args
@ -514,3 +541,9 @@ def merge_nested_dicts(dict1, dict2):
else:
dict1[key] = value
return dict1
def create_and_validate_component(code: str) -> CustomComponent:
component = CustomComponent(code=code)
component.validate()
return component

View file

@ -8,10 +8,10 @@ from fastapi.security import APIKeyHeader, APIKeyQuery, OAuth2PasswordBearer
from jose import JWTError, jwt
from sqlmodel import Session
from langflow.services.database.models.api_key.api_key import ApiKey
from langflow.services.database.models.api_key.model import ApiKey
from langflow.services.database.models.api_key.crud import check_key
from langflow.services.database.models.user.crud import get_user_by_id, get_user_by_username, update_user_last_login_at
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.model import User
from langflow.services.deps import get_session, get_settings_service
oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login", auto_error=False)

View file

@ -0,0 +1,15 @@
from typing import TYPE_CHECKING
from langflow.services.credentials.service import CredentialService
from langflow.services.factory import ServiceFactory
if TYPE_CHECKING:
from langflow.services.settings.service import SettingsService
class CredentialServiceFactory(ServiceFactory):
def __init__(self):
super().__init__(CredentialService)
def create(self, settings_service: "SettingsService"):
return CredentialService(settings_service)

View file

@ -0,0 +1,32 @@
from typing import TYPE_CHECKING, Union
from uuid import UUID
from fastapi import Depends
from langflow.services.auth import utils as auth_utils
from langflow.services.base import Service
from langflow.services.database.models.credential.model import Credential
from langflow.services.deps import get_session
from sqlmodel import Session
if TYPE_CHECKING:
from langflow.services.settings.service import SettingsService
class CredentialService(Service):
name = "credential_service"
def __init__(self, settings_service: "SettingsService"):
self.settings_service = settings_service
def get_credential(self, user_id: Union[UUID, str], name: str, session: Session = Depends(get_session)) -> str:
# we get the credential from the database
credential = session.query(Credential).filter(Credential.user_id == user_id, Credential.name == name).first()
# we decrypt the value
if not credential:
raise ValueError(f"{name} credential not found.")
decrypted = auth_utils.decrypt_api_key(credential.value, settings_service=self.settings_service)
return decrypted
def list_credentials(self, user_id: Union[UUID, str], session: Session = Depends(get_session)) -> list[Credential]:
credentials = session.query(Credential).filter(Credential.user_id == user_id).all()
return [credential.name for credential in credentials]

View file

@ -1,5 +1,6 @@
from .api_key import ApiKey
from .credential import Credential
from .flow import Flow
from .user import User
from .api_key import ApiKey
__all__ = ["Flow", "User", "ApiKey"]
__all__ = ["Flow", "User", "ApiKey", "Credential"]

View file

@ -1,3 +1,3 @@
from .api_key import ApiKey, ApiKeyCreate, UnmaskedApiKeyRead, ApiKeyRead
from .model import ApiKey, ApiKeyCreate, UnmaskedApiKeyRead, ApiKeyRead
__all__ = ["ApiKey", "ApiKeyCreate", "UnmaskedApiKeyRead", "ApiKeyRead"]

View file

@ -1,15 +1,15 @@
from pydantic import validator
from sqlmodel import Field, Relationship
from uuid import UUID, uuid4
from typing import Optional, TYPE_CHECKING
from datetime import datetime
from langflow.services.database.models.base import SQLModelSerializable
from typing import TYPE_CHECKING, Optional
from uuid import UUID, uuid4
from pydantic import validator
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from langflow.services.database.models.user import User
class ApiKeyBase(SQLModelSerializable):
class ApiKeyBase(SQLModel):
name: Optional[str] = Field(index=True)
created_at: datetime = Field(default_factory=datetime.utcnow)
last_used_at: Optional[datetime] = Field(default=None, nullable=True)

View file

@ -1,4 +1,3 @@
from sqlmodel import SQLModel
import orjson
@ -16,9 +15,3 @@ def orjson_dumps(v, *, default=None, sort_keys=False, indent_2=True):
if default is None:
return orjson.dumps(v, option=option).decode()
return orjson.dumps(v, default=default, option=option).decode()
class SQLModelSerializable(SQLModel):
# TODO[pydantic]: The following keys were removed: `json_loads`, `json_dumps`.
# Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
pass

View file

@ -1,3 +1,3 @@
from .component import Component, ComponentModel
from .model import Component, ComponentModel
__all__ = ["Component", "ComponentModel"]

View file

@ -1,11 +1,11 @@
from langflow.services.database.models.base import SQLModelSerializable, SQLModel
from sqlmodel import Field
from typing import Optional
from datetime import datetime
import uuid
from datetime import datetime
from typing import Optional
from sqlmodel import Field, SQLModel
class Component(SQLModelSerializable, table=True):
class Component(SQLModel, table=True):
id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True)
frontend_node_id: uuid.UUID = Field(index=True)
name: str = Field(index=True)

View file

@ -0,0 +1,3 @@
from .model import Credential, CredentialCreate, CredentialRead, CredentialUpdate
__all__ = ["Credential", "CredentialCreate", "CredentialRead", "CredentialUpdate"]

View file

@ -0,0 +1,45 @@
from datetime import datetime
from typing import TYPE_CHECKING, Optional
from uuid import UUID, uuid4
from langflow.services.database.models.credential.schema import CredentialType
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from langflow.services.database.models.user import User
class CredentialBase(SQLModel):
name: Optional[str] = Field(None, description="Name of the credential")
value: Optional[str] = Field(None, description="Encrypted value of the credential")
provider: Optional[str] = Field(None, description="Provider of the credential (e.g OpenAI)")
class Credential(CredentialBase, table=True):
id: Optional[UUID] = Field(default_factory=uuid4, primary_key=True, description="Unique ID for the credential")
# name is unique per user
created_at: datetime = Field(default_factory=datetime.utcnow, description="Creation time of the credential")
updated_at: Optional[datetime] = Field(None, description="Last update time of the credential")
# foreign key to user table
user_id: UUID = Field(description="User ID associated with this credential", foreign_key="user.id")
user: "User" = Relationship(back_populates="credentials")
if TYPE_CHECKING:
user: "User" = Relationship(back_populates="credentials")
class CredentialCreate(CredentialBase):
# AcceptedProviders is a custom Enum
provider: CredentialType = Field(description="Provider of the credential (e.g OpenAI)")
class CredentialRead(SQLModel):
id: UUID
name: Optional[str] = Field(None, description="Name of the credential")
provider: Optional[str] = Field(None, description="Provider of the credential (e.g OpenAI)")
class CredentialUpdate(SQLModel):
id: UUID # Include the ID for updating
name: Optional[str] = Field(None, description="Name of the credential")
value: Optional[str] = Field(None, description="Encrypted value of the credential")

View file

@ -0,0 +1,8 @@
from enum import Enum
class CredentialType(str, Enum):
"""CredentialType is an Enum of the accepted providers"""
OPENAI_API_KEY = "OPENAI_API_KEY"
ANTHROPIC_API_KEY = "ANTHROPIC_API_KEY"

View file

@ -1,3 +1,3 @@
from .flow import Flow, FlowCreate, FlowRead, FlowUpdate
from .model import Flow, FlowCreate, FlowRead, FlowUpdate
__all__ = ["Flow", "FlowCreate", "FlowRead", "FlowUpdate"]

View file

@ -5,15 +5,13 @@ from typing import TYPE_CHECKING, Dict, Optional
from uuid import UUID, uuid4
from pydantic import field_serializer, field_validator
from sqlmodel import JSON, Column, Field, Relationship
from langflow.services.database.models.base import SQLModelSerializable
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
if TYPE_CHECKING:
from langflow.services.database.models.user import User
class FlowBase(SQLModelSerializable):
class FlowBase(SQLModel):
name: str = Field(index=True)
description: Optional[str] = Field(index=True, nullable=True, default=None)
data: Optional[Dict] = Field(default=None, nullable=True)
@ -69,7 +67,7 @@ class FlowRead(FlowBase):
user_id: UUID = Field()
class FlowUpdate(SQLModelSerializable):
class FlowUpdate(SQLModel):
name: Optional[str] = None
description: Optional[str] = None
data: Optional[Dict] = None

View file

@ -1,4 +1,4 @@
from .user import User, UserCreate, UserRead, UserUpdate
from .model import User, UserCreate, UserRead, UserUpdate
__all__ = [
"User",

View file

@ -2,7 +2,7 @@ from datetime import datetime, timezone
from typing import Union
from uuid import UUID
from fastapi import Depends, HTTPException, status
from langflow.services.database.models.user.user import User, UserUpdate
from langflow.services.database.models.user.model import User, UserUpdate
from langflow.services.deps import get_session
from sqlalchemy.exc import IntegrityError
from sqlmodel import Session

View file

@ -1,17 +1,16 @@
from langflow.services.database.models.base import SQLModel, SQLModelSerializable
from sqlmodel import Field, Relationship
from datetime import datetime
from typing import Optional, TYPE_CHECKING
from typing import TYPE_CHECKING, Optional
from uuid import UUID, uuid4
from sqlmodel import Field, Relationship, SQLModel
if TYPE_CHECKING:
from langflow.services.database.models.api_key import ApiKey
from langflow.services.database.models.credential import Credential
from langflow.services.database.models.flow import Flow
class User(SQLModelSerializable, table=True):
class User(SQLModel, table=True):
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
username: str = Field(index=True, unique=True)
password: str = Field()
@ -27,6 +26,10 @@ class User(SQLModelSerializable, table=True):
)
store_api_key: str = Field(default=None, nullable=True)
flows: list["Flow"] = Relationship(back_populates="user")
credentials: list["Credential"] = Relationship(
back_populates="user",
sa_relationship_kwargs={"cascade": "delete"},
)
class UserCreate(SQLModel):

View file

@ -1,19 +1,21 @@
import time
from pathlib import Path
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import command, util
from alembic.config import Config
from loguru import logger
from sqlalchemy import inspect
from sqlalchemy.exc import OperationalError
from sqlmodel import Session, SQLModel, create_engine
from langflow.services.base import Service
from langflow.services.database import models # noqa
from langflow.services.database.models.user.crud import get_user_by_username
from langflow.services.database.utils import Result, TableResults
from langflow.services.deps import get_settings_service
from langflow.services.utils import teardown_superuser
from sqlalchemy import inspect
import sqlalchemy as sa
from sqlalchemy.exc import OperationalError
from sqlmodel import SQLModel, Session, create_engine
from loguru import logger
from alembic.config import Config
from alembic import command, util
from langflow.services.database import models # noqa
import time
if TYPE_CHECKING:
from sqlalchemy.engine import Engine

View file

@ -12,6 +12,11 @@ if TYPE_CHECKING:
from langflow.services.settings.service import SettingsService
from langflow.services.store.service import StoreService
from langflow.services.task.service import TaskService
from langflow.services.credentials.service import CredentialService
def get_credential_service() -> "CredentialService":
return service_manager.get(ServiceType.CREDENTIAL_SERVICE)
def get_settings_service() -> "SettingsService":

View file

@ -15,3 +15,4 @@ class ServiceType(str, Enum):
SESSION_SERVICE = "session_service"
TASK_SERVICE = "task_service"
STORE_SERVICE = "store_service"
CREDENTIAL_SERVICE = "credential_service"

View file

@ -17,7 +17,7 @@ class ForbiddenError(CustomException):
class APIKeyError(CustomException):
def __init__(self, detail="API key error"):
super().__init__(detail, 401)
super().__init__(detail, 400) #! Should be 401
class FilterError(CustomException):

View file

@ -445,6 +445,8 @@ class StoreService(Service):
result: List[ListComponentResponse] = []
authorized = False
metadata = {}
comp_count = 0
try:
result, metadata = await self.query_components(
api_key=store_api_key,

View file

@ -18,6 +18,7 @@ def get_factories_and_deps():
from langflow.services.settings import factory as settings_factory
from langflow.services.store import factory as store_factory
from langflow.services.task import factory as task_factory
from langflow.services.credentials import factory as credentials_factory
return [
(settings_factory.SettingsServiceFactory(), []),
@ -40,11 +41,12 @@ def get_factories_and_deps():
[ServiceType.CACHE_SERVICE],
),
(store_factory.StoreServiceFactory(), [ServiceType.SETTINGS_SERVICE]),
(credentials_factory.CredentialServiceFactory(), [ServiceType.SETTINGS_SERVICE]),
]
def get_or_create_super_user(session: Session, username, password, is_default):
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.model import User
user = session.query(User).filter(User.username == username).first()
@ -127,7 +129,7 @@ def teardown_superuser(settings_service, session):
try:
logger.debug("AUTO_LOGIN is set to False. Removing default superuser if exists.")
username = DEFAULT_SUPERUSER
from langflow.services.database.models.user.user import User
from langflow.services.database.models.user.model import User
user = session.query(User).filter(User.username == username).first()
if user and user.is_superuser:

View file

@ -59,6 +59,9 @@ class TemplateFieldCreator(BaseModel, ABC):
info: Optional[str] = ""
"""Additional information about the field to be shown in the tooltip. Defaults to an empty string."""
refresh: Optional[bool] = None
"""Specifies if the field should be refreshed. Defaults to False."""
def to_dict(self):
result = self.model_dump()
# Remove key if it is None

View file

@ -1,15 +1,15 @@
from collections import defaultdict
import re
from collections import defaultdict
from typing import ClassVar, DefaultDict, Dict, List, Optional
from pydantic import BaseModel, Field
from langflow.template.frontend_node.formatter import field_formatters
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.constants import (
CLASSES_TO_REMOVE,
FORCE_SHOW_FIELDS,
)
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.formatter import field_formatters
from langflow.template.template.base import Template
from langflow.utils import constants
@ -51,7 +51,9 @@ class FrontendNode(BaseModel):
documentation: str = ""
custom_fields: Optional[DefaultDict[str, List[str]]] = defaultdict(list)
output_types: List[str] = []
full_path: Optional[str] = None
field_formatters: FieldFormatters = Field(default_factory=FieldFormatters)
beta: bool = False
error: Optional[str] = None

View file

@ -1,11 +1,11 @@
from typing import Optional
from loguru import logger
from pathlib import Path
from rich.logging import RichHandler
from platformdirs import user_cache_dir
import os
import orjson
from pathlib import Path
from typing import Optional
import orjson
from loguru import logger
from platformdirs import user_cache_dir
from rich.logging import RichHandler
VALID_LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
@ -66,4 +66,4 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
logger.debug(f"Logger set up with log level: {log_level}")
if log_file:
logger.info(f"Log file: {log_file}")
logger.debug(f"Log file: {log_file}")

View file

@ -28,16 +28,16 @@ def extract_input_variables(nodes):
return nodes
def get_root_node(graph):
def get_root_vertex(graph):
"""
Returns the root node of the template.
"""
incoming_edges = {edge.source for edge in graph.edges}
incoming_edges = {edge.source_id for edge in graph.edges}
if not incoming_edges and len(graph.nodes) == 1:
return graph.nodes[0]
if not incoming_edges and len(graph.vertices) == 1:
return graph.vertices[0]
return next((node for node in graph.nodes if node not in incoming_edges), None)
return next((node for node in graph.vertices if node.id not in incoming_edges), None)
def build_json(root, graph) -> Dict:

View file

@ -1,7 +1,7 @@
import ast
import contextlib
import importlib
import types
from types import FunctionType
from typing import Dict
@ -61,7 +61,7 @@ def eval_function(function_string: str):
(
obj
for name, obj in namespace.items()
if isinstance(obj, types.FunctionType) and obj.__code__.co_filename == "<string>"
if isinstance(obj, FunctionType) and obj.__code__.co_filename == "<string>"
),
None,
)

View file

@ -21,11 +21,17 @@ import PromptAreaComponent from "../../../../components/promptComponent";
import TextAreaComponent from "../../../../components/textAreaComponent";
import ToggleShadComponent from "../../../../components/toggleShadComponent";
import { Button } from "../../../../components/ui/button";
import { TOOLTIP_EMPTY } from "../../../../constants/constants";
import {
LANGFLOW_SUPPORTED_TYPES,
TOOLTIP_EMPTY,
} from "../../../../constants/constants";
import { alertContext } from "../../../../contexts/alertContext";
import { FlowsContext } from "../../../../contexts/flowsContext";
import { typesContext } from "../../../../contexts/typesContext";
import { undoRedoContext } from "../../../../contexts/undoRedoContext";
import { postCustomComponentUpdate } from "../../../../controllers/API";
import { ParameterComponentType } from "../../../../types/components";
import { NodeDataType } from "../../../../types/flow";
import {
convertObjToArray,
convertValuesToNumbers,
@ -59,6 +65,7 @@ export default function ParameterComponent({
const ref = useRef<HTMLDivElement>(null);
const refHtml = useRef<HTMLDivElement & ReactNode>(null);
const infoHtml = useRef<HTMLDivElement & ReactNode>(null);
const { setErrorData } = useContext(alertContext);
const updateNodeInternals = useUpdateNodeInternals();
const [position, setPosition] = useState(0);
const { setTabsState, tabId, flows } = useContext(FlowsContext);
@ -93,6 +100,25 @@ export default function ParameterComponent({
const { takeSnapshot } = useContext(undoRedoContext);
const handleUpdateValues = async (name: string, data: NodeDataType) => {
const code = data.node?.template["code"]?.value;
if (!code) {
console.error("Code not found in the template");
return;
}
try {
const res = await postCustomComponentUpdate(code, name);
if (res.status === 200 && data.node?.template) {
let clone = cloneDeep(data);
clone.node!.template[name] = res.data.template[name];
setData(clone);
}
} catch (err) {
setErrorData(err as { title: string; list?: Array<string> });
}
};
const handleOnNewValue = (
newValue: string | string[] | boolean | Object[]
): void => {
@ -222,17 +248,7 @@ export default function ParameterComponent({
}, [tooltipTitle, flow]);
return !showNode ? (
left &&
(type === "str" ||
type === "bool" ||
type === "float" ||
type === "code" ||
type === "prompt" ||
type === "file" ||
type === "int" ||
type === "dict" ||
type === "NestedDict") &&
!optionalHandle ? (
left && LANGFLOW_SUPPORTED_TYPES.has(type ?? "") && !optionalHandle ? (
<></>
) : (
<Button className="h-7 truncate bg-muted p-0 text-sm font-normal text-black hover:bg-muted">
@ -305,17 +321,7 @@ export default function ParameterComponent({
)}
</div>
</div>
{left &&
(type === "str" ||
type === "bool" ||
type === "float" ||
type === "code" ||
type === "prompt" ||
type === "file" ||
type === "int" ||
type === "dict" ||
type === "NestedDict") &&
!optionalHandle ? (
{left && LANGFLOW_SUPPORTED_TYPES.has(type ?? "") && !optionalHandle ? (
<></>
) : (
<Button className="h-7 truncate bg-muted p-0 text-sm font-normal text-black hover:bg-muted">
@ -409,12 +415,26 @@ export default function ParameterComponent({
) : left === true &&
type === "str" &&
data.node?.template[name].options ? (
<div className="mt-2 w-full">
<Dropdown
options={data.node.template[name].options}
onSelect={handleOnNewValue}
value={data.node.template[name].value ?? "Choose an option"}
></Dropdown>
// TODO: Improve CSS
<div className="mt-2 flex w-full items-center">
<div className="w-5/6 flex-grow">
<Dropdown
options={data.node.template[name].options}
onSelect={handleOnNewValue}
value={data.node.template[name].value ?? "Choose an option"}
id={"dropdown-" + index}
/>
</div>
{data.node?.template[name].refresh && (
<button
className="extra-side-bar-buttons ml-2 mt-1 w-1/6"
onClick={() => {
handleUpdateValues(name, data);
}}
>
<IconComponent name="RefreshCcw" />
</button>
)}
</div>
) : left === true && type === "code" ? (
<div className="mt-2 w-full">
@ -491,6 +511,7 @@ export default function ParameterComponent({
data.node!.template[name].value = newValue;
handleOnNewValue(newValue);
}}
id="div-dict-input"
/>
</div>
) : left === true && type === "dict" ? (

View file

@ -38,7 +38,7 @@ export default function GenericNode({
const { types, deleteNode, reactFlowInstance, setFilterEdge, getFilterEdge } =
useContext(typesContext);
const name = nodeIconsLucide[data.type] ? data.type : types[data.type];
const [inputName, setInputName] = useState(true);
const [inputName, setInputName] = useState(false);
const [nodeName, setNodeName] = useState(data.node!.display_name);
const [inputDescription, setInputDescription] = useState(false);
const [nodeDescription, setNodeDescription] = useState(
@ -206,13 +206,21 @@ export default function GenericNode({
) : (
<ShadTooltip content={data.node?.display_name}>
<div
className="generic-node-tooltip-div text-primary"
className="flex"
onDoubleClick={() => {
setInputName(true);
takeSnapshot();
}}
>
{data.node?.display_name}
<div className="generic-node-tooltip-div pr-3 text-primary">
{data.node?.display_name}
</div>
{data.node?.flow && (
<IconComponent
name="Pencil"
className="h-5 w-5 text-primary-content"
/>
)}
</div>
</ShadTooltip>
)}

View file

@ -11,6 +11,7 @@ const SanitizedHTMLWrapper = ({
return (
<div
data-testid="edit-prompt-sanitized"
className={className}
dangerouslySetInnerHTML={{ __html: sanitizedHTML }}
suppressContentEditableWarning={suppressWarning}

View file

@ -45,6 +45,7 @@ export default function CodeAreaComponent({
<div className="flex w-full items-center">
<span
id={id}
data-testid={id}
className={
editNode
? "input-edit-node input-dialog"

View file

@ -27,6 +27,7 @@ import {
TabsList,
TabsTrigger,
} from "../../components/ui/tabs";
import { LANGFLOW_SUPPORTED_TYPES } from "../../constants/constants";
import { darkContext } from "../../contexts/darkContext";
import { typesContext } from "../../contexts/typesContext";
import { codeTabsPropsType } from "../../types/components";
@ -242,24 +243,10 @@ export default function CodeTabsComponent({
templateField.charAt(0) !== "_" &&
node.data.node.template[templateField]
.show &&
(node.data.node.template[templateField]
.type === "str" ||
LANGFLOW_SUPPORTED_TYPES.has(
node.data.node.template[templateField]
.type === "bool" ||
node.data.node.template[templateField]
.type === "float" ||
node.data.node.template[templateField]
.type === "code" ||
node.data.node.template[templateField]
.type === "prompt" ||
node.data.node.template[templateField]
.type === "file" ||
node.data.node.template[templateField]
.type === "int" ||
node.data.node.template[templateField]
.type === "dict" ||
node.data.node.template[templateField]
.type === "NestedDict")
.type
)
)
.map((templateField, indx) => {
return (

View file

@ -10,6 +10,7 @@ export default function DictComponent({
onChange,
disabled,
editNode = false,
id = "",
}: DictComponentType): JSX.Element {
useEffect(() => {
if (disabled) {
@ -30,7 +31,7 @@ export default function DictComponent({
)}
>
{
<div className="flex w-full gap-3">
<div className="flex w-full gap-3" data-testid={id}>
<DictAreaModal
value={ref.current}
onChange={(obj) => {
@ -45,6 +46,7 @@ export default function DictComponent({
: "input-disable pointer-events-none cursor-pointer"
}
placeholder="Click to edit your dictionary..."
data-testid="dict-input"
/>
</DictAreaModal>
</div>

View file

@ -11,6 +11,7 @@ export default function Dropdown({
editNode = false,
numberOfOptions = 0,
apiModal = false,
id = "",
}: DropDownComponentType): JSX.Element {
let [internalValue, setInternalValue] = useState(
value === "" || !value ? "Choose an option" : value
@ -31,15 +32,19 @@ export default function Dropdown({
>
{({ open }) => (
<>
<div className={editNode ? "mt-1" : "relative mt-1"}>
<div className={"relative mt-1"}>
<Listbox.Button
data-test={`${id ?? ""}`}
className={
editNode
? "dropdown-component-outline"
: "dropdown-component-false-outline"
}
>
<span className="dropdown-component-display">
<span
className="dropdown-component-display"
data-testid={`${id ?? ""}-display`}
>
{internalValue}
</span>
<span className={"dropdown-component-arrow"}>
@ -63,7 +68,7 @@ export default function Dropdown({
editNode
? "dropdown-component-true-options nowheel custom-scroll"
: "dropdown-component-false-options nowheel custom-scroll",
apiModal ? "mb-2 w-[250px]" : "absolute"
apiModal ? "mb-2 w-[250px]" : "absolute w-full"
)}
>
{options.map((option, id) => (
@ -86,6 +91,7 @@ export default function Dropdown({
selected ? "font-semibold" : "font-normal",
"block truncate "
)}
data-testid={`${option}-${id ?? ""}-option`}
>
{option}
</span>

View file

@ -3,7 +3,10 @@ import { IconComponentProps } from "../../types/components";
import { nodeIconsLucide } from "../../utils/styleUtils";
const ForwardedIconComponent = forwardRef(
({ name, className, iconColor, stroke }: IconComponentProps, ref) => {
(
{ name, className, iconColor, stroke, id = "" }: IconComponentProps,
ref
) => {
const TargetIcon = nodeIconsLucide[name] ?? nodeIconsLucide["unknown"];
return (
<TargetIcon
@ -12,6 +15,7 @@ const ForwardedIconComponent = forwardRef(
style={iconColor ? { color: iconColor } : {}}
ref={ref}
stroke={stroke ? stroke : "currentColor"}
data-testid={id ? `${id}-${name}` : "icon-" + name}
/>
);
}

View file

@ -53,7 +53,7 @@ export default function KeypairListComponent({
return (
<div key={idx} className="flex w-full gap-2">
<Input
id={"keypair" + index}
id={editNode ? "editNodekeypair" + index : "keypair" + index}
type="text"
value={key.trim()}
className={classNames(
@ -71,7 +71,11 @@ export default function KeypairListComponent({
/>
<Input
id={"keypair" + (index + 100).toString()}
id={
editNode
? "editNodekeypair" + (index + 100).toString()
: "keypair" + (index + 100).toString()
}
type="text"
value={obj[key]}
className={editNode ? "input-edit-node" : ""}
@ -88,7 +92,11 @@ export default function KeypairListComponent({
newInputList.push({ "": "" });
onChange(newInputList);
}}
id={"plusbtn" + index.toString()}
id={
editNode
? "editNodeplusbtn" + index.toString()
: "plusbtn" + index.toString()
}
>
<IconComponent
name="Plus"
@ -102,7 +110,11 @@ export default function KeypairListComponent({
newInputList.splice(index, 1);
onChange(newInputList);
}}
id={"minusbtn" + index.toString()}
id={
editNode
? "editNodeminusbtn" + index.toString()
: "minusbtn" + index.toString()
}
>
<IconComponent
name="X"

View file

@ -53,6 +53,7 @@ export default function PromptAreaComponent({
<div className="flex w-full items-center">
<span
id={id}
data-testid={id}
className={
editNode
? "input-edit-node input-dialog"
@ -64,6 +65,7 @@ export default function PromptAreaComponent({
</span>
{!editNode && (
<IconComponent
id={id}
name="ExternalLink"
className={
"icons-parameters-comp" +

View file

@ -47,6 +47,7 @@ export default function TextAreaComponent({
/>
{!editNode && (
<IconComponent
id={id}
name="ExternalLink"
className={
"icons-parameters-comp" +

View file

@ -663,3 +663,15 @@ export const LAST_USED_SPAN_1 = "The last time this key was used.";
export const LAST_USED_SPAN_2 =
"Accurate to within the hour from the most recent usage.";
export const LANGFLOW_SUPPORTED_TYPES = new Set([
"str",
"bool",
"float",
"code",
"prompt",
"file",
"int",
"dict",
"NestedDict",
]);

View file

@ -56,7 +56,11 @@ const FlowsContextInitialValue: FlowsContextType = {
isLoading: true,
flows: [],
removeFlow: (id: string) => {},
addFlow: async (newProject: boolean, flowData?: FlowType) => "",
addFlow: async (
newProject: boolean,
flowData?: FlowType,
override?: boolean
) => "",
updateFlow: (newFlow: FlowType) => {},
incrementNodeId: () => uid(),
downloadFlow: (flow: FlowType) => {},
@ -78,7 +82,7 @@ const FlowsContextInitialValue: FlowsContextType = {
selection: { nodes: any; edges: any },
position: { x: number; y: number; paneX?: number; paneY?: number }
) => {},
saveComponent: async (component: NodeDataType) => "",
saveComponent: async (component: NodeDataType, override: boolean) => "",
deleteComponent: (key: string) => {},
version: "",
};
@ -496,7 +500,8 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
const addFlow = async (
newProject: Boolean,
flow?: FlowType
flow?: FlowType,
override?: boolean
): Promise<String | undefined> => {
if (newProject) {
let flowData = flow
@ -505,6 +510,15 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
// Create a new flow with a default name if no flow is provided.
if (override) {
deleteComponent(flow!.name);
const newFlow = createNewFlow(flowData, flow!);
const { id } = await saveFlowToDatabase(newFlow);
newFlow.id = id;
addFlowToLocalState(newFlow);
return;
}
const newFlow = createNewFlow(flowData, flow!);
const newName = addVersionToDuplicates(newFlow, flows);
@ -595,14 +609,16 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
newFlows[index].data = newFlow.data;
newFlows[index].name = newFlow.name;
}
newFlow = {
...newFlow,
};
return newFlows;
});
}
async function saveFlow(newFlow: FlowType, silent?: boolean) {
console.log(newFlow);
if (newFlow?.data?.nodes?.length === 0) return;
try {
// updates flow in db
const updatedFlow = await updateFlowInDatabase(newFlow);
@ -615,14 +631,10 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
const newFlows = [...prevState];
const index = newFlows.findIndex((flow) => flow.id === newFlow.id);
if (index !== -1) {
newFlows[index] = {
...newFlows[index],
description: updatedFlow.description,
data: updatedFlow.data,
name: updatedFlow.name,
};
newFlows[index].description = newFlow.description ?? "";
newFlows[index].data = newFlow.data;
newFlows[index].name = newFlow.name;
}
return newFlows;
});
//update tabs state
@ -644,9 +656,9 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
}
}
function saveComponent(component: NodeDataType) {
function saveComponent(component: NodeDataType, override: boolean) {
component.node!.official = false;
return addFlow(true, createFlowComponent(component, version));
return addFlow(true, createFlowComponent(component, version), override);
}
function deleteComponent(key: string) {

View file

@ -360,6 +360,16 @@ export async function postCustomComponent(
return await api.post(`${BASE_URL_API}custom_component`, { code });
}
export async function postCustomComponentUpdate(
code: string,
field: string
): Promise<AxiosResponse<APIClassType>> {
return await api.post(`${BASE_URL_API}custom_component/update`, {
code,
field,
});
}
export async function onLogin(user: LoginType) {
try {
const response = await api.post(

View file

@ -13,7 +13,10 @@ import {
// import "ace-builds/webpack-resolver";
import CodeTabsComponent from "../../components/codeTabsComponent";
import IconComponent from "../../components/genericIconComponent";
import { EXPORT_CODE_DIALOG } from "../../constants/constants";
import {
EXPORT_CODE_DIALOG,
LANGFLOW_SUPPORTED_TYPES,
} from "../../constants/constants";
import { AuthContext } from "../../contexts/authContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { TemplateVariableType } from "../../types/api";
@ -99,15 +102,9 @@ const ApiModal = forwardRef(
(templateField) =>
templateField.charAt(0) !== "_" &&
node.data.node.template[templateField].show &&
(node.data.node.template[templateField].type === "str" ||
node.data.node.template[templateField].type === "bool" ||
node.data.node.template[templateField].type === "float" ||
node.data.node.template[templateField].type === "code" ||
node.data.node.template[templateField].type === "prompt" ||
node.data.node.template[templateField].type === "file" ||
node.data.node.template[templateField].type === "int" ||
node.data.node.template[templateField].type === "dict" ||
node.data.node.template[templateField].type === "NestedDict")
LANGFLOW_SUPPORTED_TYPES.has(
node.data.node.template[templateField].type
)
)
.map((n, i) => {
arrNodesWithValues.push(node["id"]);
@ -146,9 +143,9 @@ const ApiModal = forwardRef(
);
if (existingTweak) {
existingTweak[tw][template["name"]] = changes as string;
existingTweak[tw][template["name"]!] = changes as string;
if (existingTweak[tw][template["name"]] == template.value) {
if (existingTweak[tw][template["name"]!] == template.value) {
tweak.current.forEach((element) => {
if (element[tw] && Object.keys(element[tw])?.length === 0) {
tweak.current = tweak.current.filter((obj) => {
@ -161,7 +158,7 @@ const ApiModal = forwardRef(
} else {
const newTweak = {
[tw]: {
[template["name"]]: changes,
[template["name"]!]: changes,
},
} as uniqueTweakType;
tweak.current.push(newTweak);

View file

@ -36,10 +36,15 @@ function ConfirmationModal({
size,
open,
onClose,
onCancel,
}: ConfirmationModalType) {
const Icon: any = nodeIconsLucide[icon];
const [modalOpen, setModalOpen] = useState(open ?? false);
useEffect(() => {
if (open) setModalOpen(open);
}, [open]);
useEffect(() => {
if (onClose) onClose!(modalOpen);
}, [modalOpen]);
@ -86,6 +91,7 @@ function ConfirmationModal({
className="mt-5"
variant="outline"
onClick={() => {
if (onCancel) onCancel();
setModalOpen(false);
}}
>

View file

@ -25,7 +25,10 @@ import {
TableHeader,
TableRow,
} from "../../components/ui/table";
import { limitScrollFieldsModal } from "../../constants/constants";
import {
LANGFLOW_SUPPORTED_TYPES,
limitScrollFieldsModal,
} from "../../constants/constants";
import { FlowsContext } from "../../contexts/flowsContext";
import { typesContext } from "../../contexts/typesContext";
import { NodeDataType } from "../../types/flow";
@ -123,7 +126,7 @@ const EditNodeModal = forwardRef(
"edit-node-modal-box",
nodeLength > limitScrollFieldsModal
? "overflow-scroll overflow-x-hidden custom-scroll"
: "overflow-hidden"
: ""
)}
>
{nodeLength > 0 && (
@ -144,24 +147,9 @@ const EditNodeModal = forwardRef(
(templateParam) =>
templateParam.charAt(0) !== "_" &&
myData.node?.template[templateParam].show &&
(myData.node.template[templateParam]
.type === "str" ||
myData.node.template[templateParam]
.type === "bool" ||
myData.node.template[templateParam]
.type === "float" ||
myData.node.template[templateParam]
.type === "code" ||
myData.node.template[templateParam]
.type === "prompt" ||
myData.node.template[templateParam]
.type === "file" ||
myData.node.template[templateParam]
.type === "int" ||
myData.node.template[templateParam]
.type === "dict" ||
myData.node.template[templateParam]
.type === "NestedDict")
LANGFLOW_SUPPORTED_TYPES.has(
myData.node.template[templateParam].type
)
)
.map((templateParam, index) => (
<TableRow key={index} className="h-10">
@ -261,7 +249,7 @@ const EditNodeModal = forwardRef(
value={
myData.node!.template[
templateParam
].value.toString() === "{}"
]?.value?.toString() === "{}"
? {
yourkey: "value",
}
@ -275,6 +263,7 @@ const EditNodeModal = forwardRef(
].value = newValue;
handleOnNewValue(newValue, templateParam);
}}
id="editnode-div-dict-input"
/>
</div>
) : myData.node?.template[templateParam]
@ -380,6 +369,7 @@ const EditNodeModal = forwardRef(
templateParam
].value ?? "Choose an option"
}
id={"dropdown-edit-" + index}
></Dropdown>
</div>
) : myData.node?.template[templateParam]

View file

@ -54,6 +54,7 @@ export default function DictAreaModal({
/>
<div className="flex h-fit w-full justify-end">
<Button
data-testid="save-dict-button"
className="mt-3"
type="submit"
onClick={() => {

View file

@ -213,6 +213,7 @@ export default function GenericModal({
{type === TypeModal.PROMPT && isEdit && !readonly ? (
<Textarea
id={"modal-" + id}
data-testid={"modal-" + id}
ref={divRefPrompt}
className="form-input h-full w-full rounded-lg custom-scroll focus-visible:ring-1"
value={inputValue}
@ -252,6 +253,8 @@ export default function GenericModal({
handleKeyDown(e, value, "");
}}
readOnly={readonly}
id={"text-area-modal"}
data-testid={"text-area-modal"}
/>
) : (
<></>
@ -308,6 +311,7 @@ export default function GenericModal({
)}
</div>
<Button
data-testid="genericModalBtnSave"
id="genericModalBtnSave"
disabled={readonly}
onClick={() => {

View file

@ -6,6 +6,7 @@ import { Button } from "../../components/ui/button";
import { Checkbox } from "../../components/ui/checkbox";
import { alertContext } from "../../contexts/alertContext";
import { FlowsContext } from "../../contexts/flowsContext";
import { StoreContext } from "../../contexts/storeContext";
import {
getStoreComponents,
getStoreTags,
@ -22,14 +23,17 @@ export default function ShareModal({
children,
open,
setOpen,
disabled,
}: {
children?: ReactNode;
is_component: boolean;
component: FlowType;
open?: boolean;
setOpen?: (open: boolean) => void;
disabled?: boolean;
}): JSX.Element {
const { version, addFlow } = useContext(FlowsContext);
const { hasApiKey } = useContext(StoreContext);
const { setSuccessData, setErrorData } = useContext(alertContext);
const [checked, setChecked] = useState(true);
const [name, setName] = useState(component?.name ?? "");
@ -46,10 +50,12 @@ export default function ShareModal({
useEffect(() => {
if (open || internalOpen) {
handleGetTags();
handleGetNames();
if (hasApiKey) {
handleGetTags();
handleGetNames();
}
}
}, [open, internalOpen]);
}, [open, internalOpen, hasApiKey]);
function handleGetTags() {
setLoadingTags(true);
@ -113,7 +119,7 @@ export default function ShareModal({
return (
<BaseModal
size="smaller-h-full"
open={open ?? internalOpen}
open={(!disabled && open) ?? internalOpen}
setOpen={setOpen ?? internalSetOpen}
>
<BaseModal.Trigger>{children ? children : <></>}</BaseModal.Trigger>

View file

@ -70,6 +70,7 @@ export default function Page({
saveFlow,
setTabsState,
tabId,
flows,
} = useContext(FlowsContext);
const {
types,
@ -144,7 +145,6 @@ export default function Page({
document.removeEventListener("mousemove", handleMouseMove);
};
}, [position, lastCopiedSelection, lastSelection]);
const [selectionMenuVisible, setSelectionMenuVisible] = useState(false);
const { setExtraComponent, setExtraNavigation } = useContext(locationContext);
@ -181,6 +181,8 @@ export default function Page({
const [seconds, setSeconds] = useState(0);
useEffect(() => {
const index = flows.findIndex((flowId) => flowId.id === flow.id);
const interval = setInterval(() => {
setSeconds((prevSeconds) => {
let updatedSeconds = prevSeconds + 1;
@ -188,7 +190,7 @@ export default function Page({
if (updatedSeconds % 30 === 0) {
saveFlow(
{
...flow!,
...flows[index]!,
data: reactFlowInstance
? reactFlowInstance!.toObject()
: flow!.data,
@ -318,9 +320,9 @@ export default function Page({
);
// Calculate the position where the node should be created
const position = reactFlowInstance!.project({
x: event.clientX - reactflowBounds!.left,
y: event.clientY - reactflowBounds!.top,
const position = reactFlowInstance!.screenToFlowPosition({
x: event.clientX,
y: event.clientY,
});
// Generate a unique node ID

View file

@ -6,6 +6,7 @@ import { Input } from "../../../../components/ui/input";
import { Separator } from "../../../../components/ui/separator";
import { alertContext } from "../../../../contexts/alertContext";
import { FlowsContext } from "../../../../contexts/flowsContext";
import { StoreContext } from "../../../../contexts/storeContext";
import { typesContext } from "../../../../contexts/typesContext";
import ApiModal from "../../../../modals/ApiModal";
import ExportModal from "../../../../modals/exportModal";
@ -29,6 +30,7 @@ export default function ExtraSidebar(): JSX.Element {
useContext(typesContext);
const { flows, tabId, uploadFlow, tabsState, saveFlow, isBuilt, version } =
useContext(FlowsContext);
const { hasApiKey, validApiKey } = useContext(StoreContext);
const { setErrorData } = useContext(alertContext);
const [dataFilter, setFilterData] = useState(data);
const [search, setSearch] = useState("");
@ -179,25 +181,56 @@ export default function ExtraSidebar(): JSX.Element {
}, [getFilterEdge, data]);
const ModalMemo = useMemo(
() => (
<ShareModal is_component={false} component={flow!}>
<ShadTooltip content="Share" side="top">
<div className={classNames("extra-side-bar-buttons")}>
<IconComponent name="Share2" className="side-bar-button-size" />
</div>
</ShadTooltip>
</ShareModal>
),
[]
() =>
!hasApiKey || !validApiKey ? (
<button
disabled={!hasApiKey || !validApiKey}
className={classNames(
"extra-side-bar-buttons",
!hasApiKey || !validApiKey ? "button-disable cursor-default" : ""
)}
>
<IconComponent
name="Share2"
className={classNames(
"side-bar-button-size",
!hasApiKey || !validApiKey ? "extra-side-bar-save-disable" : ""
)}
/>
</button>
) : (
<ShareModal
is_component={false}
component={flow!}
disabled={!hasApiKey || !validApiKey}
>
<button
disabled={!hasApiKey || !validApiKey}
className={classNames(
"extra-side-bar-buttons",
!hasApiKey || !validApiKey ? "button-disable cursor-default" : ""
)}
>
<IconComponent
name="Share2"
className={classNames(
"side-bar-button-size",
!hasApiKey || !validApiKey ? "extra-side-bar-save-disable" : ""
)}
/>
</button>
</ShareModal>
),
[hasApiKey, validApiKey]
);
const ExportMemo = useMemo(
() => (
<ExportModal>
<ShadTooltip content="Export" side="top">
<div className={classNames("extra-side-bar-buttons")}>
<button className={classNames("extra-side-bar-buttons")}>
<IconComponent name="FileDown" className="side-bar-button-size" />
</div>
</button>
</ShadTooltip>
</ExportModal>
),
@ -276,8 +309,9 @@ export default function ExtraSidebar(): JSX.Element {
</div>
</ShadTooltip>
</div>
<div className="side-bar-button">{ModalMemo}</div>
<ShadTooltip content="Share" side="top" styleClasses="cursor-default">
<div className="side-bar-button">{ModalMemo}</div>
</ShadTooltip>
</div>
<Separator />
<div className="side-bar-search-div-placement">
@ -305,7 +339,15 @@ export default function ExtraSidebar(): JSX.Element {
<div className="side-bar-components-div-arrangement">
{Object.keys(dataFilter)
.sort()
.sort((a, b) => {
if (a.toLowerCase() === "saved_components") {
return -1;
} else if (b.toLowerCase() === "saved_components") {
return 1;
} else {
return a.localeCompare(b);
}
})
.map((SBSectionName: keyof APIObjectType, index) =>
Object.keys(dataFilter[SBSectionName]).length > 0 ? (
<DisclosureComponent

View file

@ -88,17 +88,17 @@ export default function SidebarDraggableComponent({
}}
>
<div
data-testid={sectionName + display_name}
id={sectionName + display_name}
className="side-bar-components-div-form"
>
<span className="side-bar-components-text">{display_name}</span>
<div>
<SelectTrigger>
<IconComponent
name="Menu"
className="side-bar-components-icon "
/>
</SelectTrigger>
<IconComponent
name="Menu"
className="side-bar-components-icon "
/>
<SelectTrigger></SelectTrigger>
<SelectContent>
<SelectItem value={"download"}>
<div className="flex">

View file

@ -10,6 +10,8 @@ import {
SelectTrigger,
} from "../../../../components/ui/select-custom";
import { FlowsContext } from "../../../../contexts/flowsContext";
import { StoreContext } from "../../../../contexts/storeContext";
import ConfirmationModal from "../../../../modals/ConfirmationModal";
import EditNodeModal from "../../../../modals/EditNodeModal";
import ShareModal from "../../../../modals/shareModal";
import { nodeToolbarPropsType } from "../../../../types/components";
@ -49,6 +51,7 @@ export default function NodeToolbarComponent({
);
const updateNodeInternals = useUpdateNodeInternals();
const { getNodeId } = useContext(FlowsContext);
const { hasApiKey, validApiKey } = useContext(StoreContext);
function canMinimize() {
let countHandles: number = 0;
@ -61,17 +64,18 @@ export default function NodeToolbarComponent({
const isMinimal = canMinimize();
const isGroup = data.node?.flow ? true : false;
const { paste, saveComponent, version } = useContext(FlowsContext);
const { paste, saveComponent, version, flows } = useContext(FlowsContext);
const reactFlowInstance = useReactFlow();
const [showModalAdvanced, setShowModalAdvanced] = useState(false);
const [showconfirmShare, setShowconfirmShare] = useState(false);
const [selectedValue, setSelectedValue] = useState("");
const [showOverrideModal, setShowOverrideModal] = useState(false);
const [flowComponent, setFlowComponent] = useState<FlowType>();
useEffect(() => {
setFlowComponent(createFlowComponent(cloneDeep(data), version));
}, [data]);
}, [data, showModalAdvanced]);
const handleSelectChange = (event) => {
switch (event) {
@ -86,10 +90,10 @@ export default function NodeToolbarComponent({
downloadNode(createFlowComponent(cloneDeep(data), version));
break;
case "Share":
setShowconfirmShare(true);
if (hasApiKey) setShowconfirmShare(true);
break;
case "SaveAll":
saveComponent(cloneDeep(data));
saveComponent(cloneDeep(data), false);
break;
case "disabled":
break;
@ -97,9 +101,16 @@ export default function NodeToolbarComponent({
updateFlowPosition(position, data.node?.flow!);
expandGroupNode(data, reactFlowInstance, getNodeId);
break;
case "override":
setShowOverrideModal(true);
break;
}
};
const isSaved = flows.some((flow) =>
Object.values(flow).includes(data.node?.display_name!)
);
return (
<>
<div className="w-26 h-10">
@ -174,10 +185,7 @@ export default function NodeToolbarComponent({
<div
data-testid="more-options-modal"
className={classNames(
"relative -ml-px inline-flex h-8 w-[31px] items-center rounded-r-md bg-background text-foreground shadow-md ring-1 ring-inset ring-ring transition-all duration-500 ease-in-out hover:bg-muted focus:z-10" +
(nodeLength == 0
? " text-muted-foreground"
: " text-foreground")
"relative -ml-px inline-flex h-8 w-[31px] items-center rounded-r-md bg-background text-foreground shadow-md ring-1 ring-inset ring-ring transition-all duration-500 ease-in-out hover:bg-muted focus:z-10"
)}
>
<IconComponent
@ -191,7 +199,7 @@ export default function NodeToolbarComponent({
<SelectContent>
{nodeLength > 0 && (
<SelectItem value={nodeLength === 0 ? "disabled" : "advanced"}>
<div className="flex">
<div className="flex" data-testid="edit-button-modal">
<IconComponent
name="Settings2"
className="relative top-0.5 mr-2 h-4 w-4"
@ -201,16 +209,29 @@ export default function NodeToolbarComponent({
</SelectItem>
)}
<SelectItem value={"SaveAll"}>
<div className="flex">
<IconComponent
name="SaveAll"
className="relative top-0.5 mr-2 h-4 w-4"
/>{" "}
Save{" "}
</div>{" "}
</SelectItem>
<SelectItem value={"Share"}>
{isSaved ? (
<SelectItem value={"override"}>
<div className="flex">
<IconComponent
name="SaveAll"
className="relative top-0.5 mr-2 h-4 w-4"
/>{" "}
Save{" "}
</div>{" "}
</SelectItem>
) : (
<SelectItem value={"SaveAll"}>
<div className="flex">
<IconComponent
name="SaveAll"
className="relative top-0.5 mr-2 h-4 w-4"
/>{" "}
Save{" "}
</div>{" "}
</SelectItem>
)}
<SelectItem disabled={!hasApiKey || !validApiKey} value={"Share"}>
<div className="flex">
<IconComponent
name="Share2"
@ -252,6 +273,33 @@ export default function NodeToolbarComponent({
)}
</SelectContent>
</Select>
<ConfirmationModal
asChild
open={showOverrideModal}
title={`Replace ${data.node?.display_name}`}
titleHeader={`Please, confirm your save actions`}
modalContentTitle="Attention!"
cancelText="New"
confirmationText="Replace"
icon={"SaveAll"}
index={6}
onConfirm={(index, user) => {
saveComponent(cloneDeep(data), true);
}}
onClose={setShowOverrideModal}
onCancel={() => saveComponent(cloneDeep(data), false)}
>
<ConfirmationModal.Content>
<span>
It seems {data.node?.display_name} already exists. Replacing it
will switch the current component. Proceed with replacement?
</span>
</ConfirmationModal.Content>
<ConfirmationModal.Trigger>
<></>
</ConfirmationModal.Trigger>
</ConfirmationModal>
<EditNodeModal
data={data}
nodeLength={nodeLength}

View file

@ -275,8 +275,18 @@ export default function StorePage(): JSX.Element {
<SelectContent>
<SelectGroup>
<SelectItem value="all">All</SelectItem>
<SelectItem value="createdbyme">Created By Me</SelectItem>
<SelectItem value="likedbyme">Liked By Me</SelectItem>
<SelectItem
disabled={!hasApiKey || !validApiKey}
value="createdbyme"
>
Created By Me
</SelectItem>
<SelectItem
disabled={!hasApiKey || !validApiKey}
value="likedbyme"
>
Liked By Me
</SelectItem>
</SelectGroup>
</SelectContent>
</Select>

View file

@ -114,7 +114,7 @@
@apply pointer-events-none;
}
.extra-side-bar-buttons {
@apply relative inline-flex w-full items-center justify-center rounded-md bg-background px-2 py-2 text-foreground shadow-sm ring-1 ring-inset ring-input transition-all duration-500 ease-in-out;
@apply relative inline-flex w-full items-center justify-center rounded-md bg-background px-2 py-2 text-foreground shadow-sm ring-1 ring-inset ring-input transition-all duration-500 ease-in-out;
}
.extra-side-bar-buttons:hover {
@apply hover:bg-muted;

View file

@ -36,6 +36,7 @@ export type DropDownComponentType = {
editNode?: boolean;
apiModal?: boolean;
numberOfOptions?: number;
id?: string;
};
export type ParameterComponentType = {
data: NodeDataType;
@ -67,6 +68,7 @@ export type KeyPairListComponentType = {
disabled: boolean;
editNode?: boolean;
duplicateKey?: boolean;
editNodeModal?: boolean;
};
export type DictComponentType = {
@ -74,6 +76,7 @@ export type DictComponentType = {
onChange: (value) => void;
disabled: boolean;
editNode?: boolean;
id?: string;
};
export type TextAreaComponentType = {
@ -214,6 +217,7 @@ export type IconComponentProps = {
iconColor?: string;
onClick?: () => void;
stroke?: string;
id?: string;
};
export type InputProps = {
@ -277,6 +281,7 @@ export type PaginatorComponentType = {
};
export type ConfirmationModalType = {
onCancel?: () => void;
title: string;
titleHeader: string;
asChild?: boolean;

View file

@ -10,7 +10,8 @@ export type FlowsContextType = {
removeFlow: (id: string) => void;
addFlow: (
newProject: boolean,
flow?: FlowType
flow?: FlowType,
override?: boolean
) => Promise<String | undefined>;
updateFlow: (newFlow: FlowType) => void;
incrementNodeId: () => string;
@ -44,7 +45,10 @@ export type FlowsContextType = {
setLastCopiedSelection: (selection: { nodes: any; edges: any }) => void;
setTweak: (tweak: tweakType) => tweakType | void;
getTweak: tweakType;
saveComponent: (component: NodeDataType) => Promise<String | undefined>;
saveComponent: (
component: NodeDataType,
override: boolean
) => Promise<String | undefined>;
deleteComponent: (key: string) => void;
version: string;
};

View file

@ -9,7 +9,10 @@ import {
XYPosition,
} from "reactflow";
import ShortUniqueId from "short-unique-id";
import { specialCharsRegex } from "../constants/constants";
import {
LANGFLOW_SUPPORTED_TYPES,
specialCharsRegex,
} from "../constants/constants";
import { APITemplateType, TemplateVariableType } from "../types/api";
import {
FlowType,
@ -634,15 +637,7 @@ function updateGroupNodeTemplate(template: APITemplateType) {
let type = template[key].type;
let input_types = template[key].input_types;
if (
(type === "str" ||
type === "bool" ||
type === "float" ||
type === "code" ||
type === "prompt" ||
type === "file" ||
type === "int" ||
type === "dict" ||
type === "NestedDict") &&
LANGFLOW_SUPPORTED_TYPES.has(type) &&
!template[key].required &&
!input_types
) {

View file

@ -67,6 +67,7 @@ import {
Pencil,
Plus,
Redo,
RefreshCcw,
Rocket,
Save,
SaveAll,
@ -195,7 +196,7 @@ export const nodeNames: { [char: string]: string } = {
agents: "Agents",
tools: "Tools",
memories: "Memories",
saved_components: "Saved Components",
saved_components: "Saved",
advanced: "Advanced",
chat: "Chat",
embeddings: "Embeddings",
@ -354,4 +355,5 @@ export const nodeIconsLucide: iconsType = {
Heart,
Link,
ToyBrick,
RefreshCcw,
};

View file

@ -13,7 +13,7 @@ test("CodeAreaModalComponent", async ({ page }) => {
await page.waitForTimeout(2000);
await page
.locator('//*[@id="sidePythonFunctionTool"]')
.getByTestId("toolsPythonFunctionTool")
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
@ -34,8 +34,9 @@ test("CodeAreaModalComponent", async ({ page }) => {
await page
.locator('//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div')
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("genericModalBtnSave").click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showcode"]').click();
expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeFalsy();
@ -114,8 +115,8 @@ test("CodeAreaModalComponent", async ({ page }) => {
await page
.locator('//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div')
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showcode"]').click();
expect(await page.locator('//*[@id="showcode"]').isChecked()).toBeTruthy();

View file

@ -0,0 +1,104 @@
import { expect, test } from "@playwright/test";
test("dropDownComponent", async ({ page }) => {
await page.goto("http://localhost:3000/");
await page.waitForTimeout(2000);
await page.locator('//*[@id="new-project-btn"]').click();
await page.waitForTimeout(2000);
await page.getByPlaceholder("Search").click();
await page.getByPlaceholder("Search").fill("amazon");
await page.waitForTimeout(2000);
await page
.getByTestId("llmsAmazon Bedrock")
.first()
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
await page.getByTestId("dropdown-2-display").click();
await page.getByTestId("ai21.j2-grande-instruct-0-option").click();
let value = await page.getByTestId("dropdown-2-display").innerText();
if (value !== "ai21.j2-grande-instruct") {
expect(false).toBeTruthy();
}
await page.getByTestId("dropdown-2-display").click();
await page.getByTestId("ai21.j2-jumbo-instruct-1-option").click();
value = await page.getByTestId("dropdown-2-display").innerText();
if (value !== "ai21.j2-jumbo-instruct") {
expect(false).toBeTruthy();
}
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
value = await page.getByTestId("dropdown-edit-1-display").innerText();
if (value !== "ai21.j2-jumbo-instruct") {
expect(false).toBeTruthy();
}
// showcredentials_profile_name
await page.locator('//*[@id="showcredentials_profile_name"]').click();
expect(
await page.locator('//*[@id="showcredentials_profile_name"]').isChecked()
).toBeFalsy();
// showmodel_id
await page.locator('//*[@id="showmodel_id"]').click();
expect(await page.locator('//*[@id="showmodel_id"]').isChecked()).toBeFalsy();
// showcredentials_profile_name
await page.locator('//*[@id="showcredentials_profile_name"]').click();
expect(
await page.locator('//*[@id="showcredentials_profile_name"]').isChecked()
).toBeTruthy();
// showmodel_id
await page.locator('//*[@id="showmodel_id"]').click();
expect(
await page.locator('//*[@id="showmodel_id"]').isChecked()
).toBeTruthy();
// showcredentials_profile_name
await page.locator('//*[@id="showcredentials_profile_name"]').click();
expect(
await page.locator('//*[@id="showcredentials_profile_name"]').isChecked()
).toBeFalsy();
// showmodel_id
await page.locator('//*[@id="showmodel_id"]').click();
expect(await page.locator('//*[@id="showmodel_id"]').isChecked()).toBeFalsy();
// showcredentials_profile_name
await page.locator('//*[@id="showcredentials_profile_name"]').click();
expect(
await page.locator('//*[@id="showcredentials_profile_name"]').isChecked()
).toBeTruthy();
// showmodel_id
await page.locator('//*[@id="showmodel_id"]').click();
expect(
await page.locator('//*[@id="showmodel_id"]').isChecked()
).toBeTruthy();
await page.getByTestId("dropdown-edit-1-display").click();
await page.getByTestId("ai21.j2-ultra-v1-5-option").click();
value = await page.getByTestId("dropdown-edit-1-display").innerText();
if (value !== "ai21.j2-ultra-v1") {
expect(false).toBeTruthy();
}
await page.locator('//*[@id="saveChangesBtn"]').click();
value = await page.getByTestId("dropdown-2-display").innerText();
if (value !== "ai21.j2-ultra-v1") {
expect(false).toBeTruthy();
}
});

View file

@ -42,6 +42,7 @@ test("FloatComponent", async ({ page }) => {
)
.click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showcache"]').click();
expect(await page.locator('//*[@id="showcache"]').isChecked()).toBeTruthy();
@ -385,7 +386,8 @@ test("FloatComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div/div/div[1]/div/div[1]/div'
)
.click();
await page.locator('//*[@id="editAdvancedIcon"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
// showtemperature
await page.locator('//*[@id="showtemperature"]').click();

View file

@ -14,7 +14,7 @@ test.describe("Flow Page tests", () => {
.filter({ hasText: /^Custom Component$/ })
.nth(4)
.dragTo(page.locator(".react-flow__pane"));
await page.locator("div:nth-child(4) > .extra-side-bar-buttons").click();
await page.locator(".success-alert").click();
await page.getByTestId("icon-ExternalLink").click();
await page.locator('//*[@id="checkAndSaveBtn"]').click();
});
});

View file

@ -13,7 +13,8 @@ test("IntComponent", async ({ page }) => {
await page.waitForTimeout(2000);
await page
.locator('//*[@id="sideGET Request"]')
.getByTestId("utilitiesGET Request")
.first()
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
@ -30,7 +31,7 @@ test("IntComponent", async ({ page }) => {
}
await page.locator('//*[@id="int-input-2"]').click();
await page.locator('//*[@id="int-input-2"]').fill("-3");
await page.locator('//*[@id="int-input-2"]').fill("0");
value = await page.locator('//*[@id="int-input-2"]').inputValue();
@ -41,8 +42,8 @@ test("IntComponent", async ({ page }) => {
await page
.locator('//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div')
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
value = await page.locator('//*[@id="int-input-1"]').inputValue();
@ -80,8 +81,8 @@ test("IntComponent", async ({ page }) => {
await page
.locator('//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div')
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showtimeout"]').click();
expect(

View file

@ -13,7 +13,7 @@ test("KeypairListComponent", async ({ page }) => {
await page.waitForTimeout(2000);
await page
.locator('//*[@id="sideCSVLoader"]')
.getByTestId("documentloadersCSVLoader")
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
@ -66,8 +66,8 @@ test("KeypairListComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div/div/div[1]/div/div[1]/div'
)
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showfile_path"]').click();
expect(
@ -87,8 +87,8 @@ test("KeypairListComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div/div/div[1]/div/div[1]/div'
)
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showfile_path"]').click();
expect(
@ -99,11 +99,11 @@ test("KeypairListComponent", async ({ page }) => {
await page.locator('//*[@id="showmetadata"]').isChecked()
).toBeTruthy();
await page.locator('//*[@id="keypair0"]').click();
await page.locator('//*[@id="keypair0"]').fill("testtesttesttest");
await page.locator('//*[@id="keypair100"]').click();
await page.locator('//*[@id="editNodekeypair0"]').click();
await page.locator('//*[@id="editNodekeypair0"]').fill("testtesttesttest");
await page.locator('//*[@id="editNodekeypair100"]').click();
await page
.locator('//*[@id="keypair100"]')
.locator('//*[@id="editNodekeypair100"]')
.fill("test test test test test test");
const plusButtonLocator = page.locator('//*[@id="plusbtn0"]');
@ -112,24 +112,24 @@ test("KeypairListComponent", async ({ page }) => {
await plusButtonLocator.click();
}
await page.locator('//*[@id="keypair1"]').click();
await page.locator('//*[@id="keypair1"]').fill("testtesttesttest1");
await page.locator('//*[@id="keypair101"]').click();
await page.locator('//*[@id="editNodekeypair1"]').click();
await page.locator('//*[@id="editNodekeypair1"]').fill("testtesttesttest1");
await page.locator('//*[@id="editNodekeypair101"]').first().click();
await page
.locator('//*[@id="keypair101"]')
.locator('//*[@id="editNodekeypair101"]')
.fill("testtesttesttesttesttest1");
await page.locator('//*[@id="plusbtn1"]').click();
await page.locator('//*[@id="editNodeplusbtn1"]').click();
await page.locator('//*[@id="keypair2"]').click();
await page.locator('//*[@id="keypair2"]').fill("testtesttesttest2");
await page.locator('//*[@id="keypair102"]').click();
await page.locator('//*[@id="editNodekeypair2"]').click();
await page.locator('//*[@id="editNodekeypair2"]').fill("testtesttesttest2");
await page.locator('//*[@id="editNodekeypair102"]').click();
await page
.locator('//*[@id="keypair102"]')
.locator('//*[@id="editNodekeypair102"]')
.fill("testtesttesttesttesttest2");
await page.locator('//*[@id="minusbtn1"]').click();
await page.locator('//*[@id="editNodeminusbtn1"]').click();
const keyPairVerification = page.locator('//*[@id="keypair102"]');
const keyPairVerification = page.locator('//*[@id="editNodekeypair102"]');
const elementKeyCount = await keyPairVerification.count();
if (elementKeyCount === 0) {
@ -143,8 +143,8 @@ test("KeypairListComponent", async ({ page }) => {
if (
key1 === "testtesttesttest" &&
value1 === "test test test test test test" &&
key2 === "testtesttesttest2" &&
value2 === "testtesttesttesttesttest2"
key2 === "testtesttesttest1" &&
value2 === "testtesttesttesttesttest1"
) {
expect(true).toBeTruthy();
} else {

View file

@ -0,0 +1,239 @@
import { expect, test } from "@playwright/test";
test("NestedComponent", async ({ page }) => {
await page.goto("http://localhost:3000/");
await page.waitForTimeout(2000);
await page.locator('//*[@id="new-project-btn"]').click();
await page.waitForTimeout(2000);
await page.getByPlaceholder("Search").click();
await page.getByPlaceholder("Search").fill("pinecone");
await page.waitForTimeout(2000);
await page
.getByTestId("vectorstoresPinecone")
.first()
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
// showindex_name
await page.locator('//*[@id="showindex_name"]').click();
expect(
await page.locator('//*[@id="showindex_name"]').isChecked()
).toBeFalsy();
// shownamespace
await page.locator('//*[@id="shownamespace"]').click();
expect(
await page.locator('//*[@id="shownamespace"]').isChecked()
).toBeFalsy();
// showpinecone_api_key
await page.locator('//*[@id="showpinecone_api_key"]').click();
expect(
await page.locator('//*[@id="showpinecone_api_key"]').isChecked()
).toBeTruthy();
// showpinecone_env
await page.locator('//*[@id="showpinecone_env"]').click();
expect(
await page.locator('//*[@id="showpinecone_env"]').isChecked()
).toBeTruthy();
// showsearch_kwargs
await page.locator('//*[@id="showsearch_kwargs"]').click();
expect(
await page.locator('//*[@id="showsearch_kwargs"]').isChecked()
).toBeTruthy();
// showindex_name
await page.locator('//*[@id="showindex_name"]').click();
expect(
await page.locator('//*[@id="showindex_name"]').isChecked()
).toBeTruthy();
// shownamespace
await page.locator('//*[@id="shownamespace"]').click();
expect(
await page.locator('//*[@id="shownamespace"]').isChecked()
).toBeTruthy();
// showpinecone_api_key
await page.locator('//*[@id="showpinecone_api_key"]').click();
expect(
await page.locator('//*[@id="showpinecone_api_key"]').isChecked()
).toBeFalsy();
// showpinecone_env
await page.locator('//*[@id="showpinecone_env"]').click();
expect(
await page.locator('//*[@id="showpinecone_env"]').isChecked()
).toBeFalsy();
// showsearch_kwargs
await page.locator('//*[@id="showsearch_kwargs"]').click();
expect(
await page.locator('//*[@id="showsearch_kwargs"]').isChecked()
).toBeFalsy();
// showindex_name
await page.locator('//*[@id="showindex_name"]').click();
expect(
await page.locator('//*[@id="showindex_name"]').isChecked()
).toBeFalsy();
// shownamespace
await page.locator('//*[@id="shownamespace"]').click();
expect(
await page.locator('//*[@id="shownamespace"]').isChecked()
).toBeFalsy();
// showpinecone_api_key
await page.locator('//*[@id="showpinecone_api_key"]').click();
expect(
await page.locator('//*[@id="showpinecone_api_key"]').isChecked()
).toBeTruthy();
// showpinecone_env
await page.locator('//*[@id="showpinecone_env"]').click();
expect(
await page.locator('//*[@id="showpinecone_env"]').isChecked()
).toBeTruthy();
// showsearch_kwargs
await page.locator('//*[@id="showsearch_kwargs"]').click();
expect(
await page.locator('//*[@id="showsearch_kwargs"]').isChecked()
).toBeTruthy();
// showindex_name
await page.locator('//*[@id="showindex_name"]').click();
expect(
await page.locator('//*[@id="showindex_name"]').isChecked()
).toBeTruthy();
// shownamespace
await page.locator('//*[@id="shownamespace"]').click();
expect(
await page.locator('//*[@id="shownamespace"]').isChecked()
).toBeTruthy();
// showpinecone_api_key
await page.locator('//*[@id="showpinecone_api_key"]').click();
expect(
await page.locator('//*[@id="showpinecone_api_key"]').isChecked()
).toBeFalsy();
// showpinecone_env
await page.locator('//*[@id="showpinecone_env"]').click();
expect(
await page.locator('//*[@id="showpinecone_env"]').isChecked()
).toBeFalsy();
// showsearch_kwargs
await page.locator('//*[@id="showsearch_kwargs"]').click();
expect(
await page.locator('//*[@id="showsearch_kwargs"]').isChecked()
).toBeFalsy();
// showindex_name
await page.locator('//*[@id="showindex_name"]').click();
expect(
await page.locator('//*[@id="showindex_name"]').isChecked()
).toBeFalsy();
// shownamespace
await page.locator('//*[@id="shownamespace"]').click();
expect(
await page.locator('//*[@id="shownamespace"]').isChecked()
).toBeFalsy();
// showpinecone_api_key
await page.locator('//*[@id="showpinecone_api_key"]').click();
expect(
await page.locator('//*[@id="showpinecone_api_key"]').isChecked()
).toBeTruthy();
// showpinecone_env
await page.locator('//*[@id="showpinecone_env"]').click();
expect(
await page.locator('//*[@id="showpinecone_env"]').isChecked()
).toBeTruthy();
// showsearch_kwargs
await page.locator('//*[@id="showsearch_kwargs"]').click();
expect(
await page.locator('//*[@id="showsearch_kwargs"]').isChecked()
).toBeTruthy();
// showindex_name
await page.locator('//*[@id="showindex_name"]').click();
expect(
await page.locator('//*[@id="showindex_name"]').isChecked()
).toBeTruthy();
// shownamespace
await page.locator('//*[@id="shownamespace"]').click();
expect(
await page.locator('//*[@id="shownamespace"]').isChecked()
).toBeTruthy();
// showpinecone_api_key
await page.locator('//*[@id="showpinecone_api_key"]').click();
expect(
await page.locator('//*[@id="showpinecone_api_key"]').isChecked()
).toBeFalsy();
// showpinecone_env
await page.locator('//*[@id="showpinecone_env"]').click();
expect(
await page.locator('//*[@id="showpinecone_env"]').isChecked()
).toBeFalsy();
await page.locator('//*[@id="saveChangesBtn"]').click();
await page.getByTestId("div-dict-input").click();
const spanElement = await page
.locator('//*[@id="radix-:r4i:"]/div[2]/div/div/code/div/div/span[1]')
.innerHTML();
if (spanElement !== "yourkey") {
expect(true).toBeFalsy();
}
});

View file

@ -13,19 +13,20 @@ test("PromptTemplateComponent", async ({ page }) => {
await page.waitForTimeout(2000);
await page
.locator('//*[@id="sidePromptTemplate"]')
.locator('//*[@id="promptsPromptTemplate"]')
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
await page.locator('//*[@id="prompt-input-4"]').click();
await page
.locator('//*[@id="modal-prompt-input-4"]')
.fill("{prompt} example {prompt1}");
await page.getByTestId("prompt-input-4-ExternalLink").click();
// await page.getByTestId("edit-prompt-sanitized").click();
let value = await page
.locator('//*[@id="modal-prompt-input-4"]')
.inputValue();
await page
.getByTestId("modal-prompt-input-4")
.fill("{prompt} example {prompt1}");
// await page.getByTestId("edit-prompt-sanitized").click();
let value = await page.getByTestId("modal-prompt-input-4").inputValue();
if (value != "{prompt} example {prompt1}") {
expect(false).toBeTruthy();
@ -43,33 +44,38 @@ test("PromptTemplateComponent", async ({ page }) => {
await page.locator('//*[@id="genericModalBtnSave"]').click();
await page.locator('//*[@id="textarea-7"]').click();
await page.locator('//*[@id="textarea-7"]').fill("prompt_value_!@#!@#");
await page.getByTestId("textarea-4-ExternalLink").click();
await page.getByTestId("text-area-modal").fill("prompt_value_!@#!@#");
value = await page.locator('//*[@id="textarea-7"]').inputValue();
value = await page.getByTestId("text-area-modal").inputValue();
if (value != "prompt_value_!@#!@#") {
expect(false).toBeTruthy();
}
await page.locator('//*[@id="textarea-8"]').click();
await page.getByTestId("genericModalBtnSave").click();
await page.getByTestId("textarea-5-ExternalLink").click();
await page
.locator('//*[@id="textarea-8"]')
.getByTestId("text-area-modal")
.fill("prompt_name_test_123123!@#!@#");
value = await page.locator('//*[@id="textarea-8"]').inputValue();
value = await page.getByTestId("text-area-modal").inputValue();
if (value != "prompt_name_test_123123!@#!@#") {
expect(false).toBeTruthy();
}
value = await page.locator('//*[@id="prompt-input-4"]').innerText();
value = await page.getByTestId("text-area-modal").inputValue();
if (value != "{prompt} example {prompt1}") {
if (value != "prompt_name_test_123123!@#!@#") {
expect(false).toBeTruthy();
}
await page.locator('//*[@id="editAdvancedIcon"]').click();
await page.getByTestId("genericModalBtnSave").click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
value = await page.locator('//*[@id="textarea-edit-1"]').inputValue();
@ -146,7 +152,8 @@ test("PromptTemplateComponent", async ({ page }) => {
)
.click();
await page.locator('//*[@id="editAdvancedIcon"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showprompt1"]').click();
expect(

View file

@ -23,7 +23,7 @@ test("InputComponent", async ({ page }) => {
await page.waitForTimeout(2000);
await page
.locator('//*[@id="sideChroma"]')
.getByTestId("vectorstoresChroma")
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
@ -44,7 +44,8 @@ test("InputComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div/div/div[1]/div/div[1]/div'
)
.click();
await page.locator('//*[@id="editAdvancedIcon"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showchroma_server_cors_allow_origins"]').click();
expect(
@ -154,7 +155,8 @@ test("InputComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div/div/div[1]/div/div[1]/div'
)
.click();
await page.locator('//*[@id="editAdvancedIcon"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showcollection_name"]').click();
expect(

View file

@ -22,7 +22,7 @@ test("ToggleComponent", async ({ page }) => {
await page.waitForTimeout(2000);
await page
.locator('//*[@id="sideDirectoryLoader"]')
.getByTestId("documentloadersDirectoryLoader")
.dragTo(page.locator('//*[@id="react-flow-id"]'));
await page.mouse.up();
await page.mouse.down();
@ -32,8 +32,8 @@ test("ToggleComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div/div/div[2]/div/div/div[1]/div'
)
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showload_hidden"]').click();
expect(
@ -62,8 +62,8 @@ test("ToggleComponent", async ({ page }) => {
'//*[@id="react-flow-id"]/div[1]/div[1]/div/div/div[2]/div/div/div[1]/div'
)
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
expect(
await page.locator('//*[@id="toggle-edit-1"]').isChecked()
@ -144,8 +144,8 @@ test("ToggleComponent", async ({ page }) => {
await page
.locator('//*[@id="react-flow-id"]/div[1]/div[1]/div[1]/div/div[2]/div')
.click();
await page.locator('//*[@id="advancedIcon"]').click();
await page.locator('//*[@id="editAdvancedBtn"]').click();
await page.getByTestId("more-options-modal").click();
await page.getByTestId("edit-button-modal").click();
await page.locator('//*[@id="showload_hidden"]').click();
expect(

View file

@ -12,8 +12,8 @@ from fastapi.testclient import TestClient
from httpx import AsyncClient
from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import get_password_hash
from langflow.services.database.models.flow.flow import Flow, FlowCreate
from langflow.services.database.models.user.user import User, UserCreate
from langflow.services.database.models.flow.model import Flow, FlowCreate
from langflow.services.database.models.user.model import User, UserCreate
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
from sqlmodel import Session, SQLModel, create_engine
@ -260,7 +260,7 @@ def logged_in_headers(client, active_user):
@pytest.fixture
def flow(client, json_flow: str, active_user):
from langflow.services.database.models.flow.flow import FlowCreate
from langflow.services.database.models.flow.model import FlowCreate
loaded_json = json.loads(json_flow)
flow_data = FlowCreate(

View file

@ -1,7 +1,7 @@
import json
from langflow.graph import Graph
import pytest
from langflow.graph import Graph
def get_graph(_type="basic"):
@ -41,5 +41,5 @@ def langchain_objects_are_equal(obj1, obj2):
def test_build_graph(client, basic_data_graph):
graph = Graph.from_payload(basic_data_graph)
assert graph is not None
assert len(graph.nodes) == len(basic_data_graph["nodes"])
assert len(graph.vertices) == len(basic_data_graph["nodes"])
assert len(graph.edges) == len(basic_data_graph["edges"])

View file

@ -7,10 +7,7 @@ from fastapi import HTTPException
from langflow.field_typing.constants import Data
from langflow.interface.custom.base import CustomComponent
from langflow.interface.custom.code_parser import CodeParser, CodeSyntaxError
from langflow.interface.custom.component import (
Component,
ComponentCodeNullError,
)
from langflow.interface.custom.component import Component, ComponentCodeNullError
from langflow.services.database.models.flow import Flow, FlowCreate
code_default = """
@ -445,7 +442,7 @@ def test_custom_component_build_not_implemented():
def test_build_config_no_code():
component = CustomComponent(code=None)
assert component.get_function_entrypoint_args == ""
assert component.get_function_entrypoint_args == []
assert component.get_function_entrypoint_return_type == []

View file

@ -2,7 +2,7 @@ from collections import namedtuple
import uuid
from langflow.processing.process import Result
from langflow.services.auth.utils import get_password_hash
from langflow.services.database.models.api_key.api_key import ApiKey
from langflow.services.database.models.api_key.model import ApiKey
from langflow.services.deps import get_settings_service
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service

View file

@ -24,7 +24,7 @@ from langflow.graph.utils import UnbuiltObject
from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import FileToolVertex, LLMVertex, ToolkitVertex
from langflow.processing.process import get_result_and_thought
from langflow.utils.payload import get_root_node
from langflow.utils.payload import get_root_vertex
# Test cases for the graph module
@ -70,19 +70,19 @@ def sample_nodes():
def get_node_by_type(graph, node_type: Type[Vertex]) -> Union[Vertex, None]:
"""Get a node by type"""
return next((node for node in graph.nodes if isinstance(node, node_type)), None)
return next((node for node in graph.vertices if isinstance(node, node_type)), None)
def test_graph_structure(basic_graph):
assert isinstance(basic_graph, Graph)
assert len(basic_graph.nodes) > 0
assert len(basic_graph.vertices) > 0
assert len(basic_graph.edges) > 0
for node in basic_graph.nodes:
for node in basic_graph.vertices:
assert isinstance(node, Vertex)
for edge in basic_graph.edges:
assert isinstance(edge, Edge)
assert edge.source in basic_graph.nodes
assert edge.target in basic_graph.nodes
assert edge.source_id in basic_graph.vertex_ids
assert edge.target_id in basic_graph.vertex_ids
def test_circular_dependencies(basic_graph):
@ -90,7 +90,7 @@ def test_circular_dependencies(basic_graph):
def check_circular(node, visited):
visited.add(node)
neighbors = basic_graph.get_nodes_with_target(node)
neighbors = basic_graph.get_vertices_with_target(node)
for neighbor in neighbors:
if neighbor in visited:
return True
@ -98,7 +98,7 @@ def test_circular_dependencies(basic_graph):
return True
return False
for node in basic_graph.nodes:
for node in basic_graph.vertices:
assert not check_circular(node, set())
@ -123,13 +123,13 @@ def test_invalid_node_types():
Graph(graph_data["nodes"], graph_data["edges"])
def test_get_nodes_with_target(basic_graph):
def test_get_vertices_with_target(basic_graph):
"""Test getting connected nodes"""
assert isinstance(basic_graph, Graph)
# Get root node
root = get_root_node(basic_graph)
root = get_root_vertex(basic_graph)
assert root is not None
connected_nodes = basic_graph.get_nodes_with_target(root)
connected_nodes = basic_graph.get_vertices_with_target(root.id)
assert connected_nodes is not None
@ -138,9 +138,9 @@ def test_get_node_neighbors_basic(basic_graph):
assert isinstance(basic_graph, Graph)
# Get root node
root = get_root_node(basic_graph)
root = get_root_vertex(basic_graph)
assert root is not None
neighbors = basic_graph.get_node_neighbors(root)
neighbors = basic_graph.get_vertex_neighbors(root)
assert neighbors is not None
assert isinstance(neighbors, dict)
# Root Node is an Agent, it requires an LLMChain and tools
@ -153,8 +153,8 @@ def test_get_node_neighbors_basic(basic_graph):
def test_get_node(basic_graph):
"""Test getting a single node"""
node_id = basic_graph.nodes[0].id
node = basic_graph.get_node(node_id)
node_id = basic_graph.vertices[0].id
node = basic_graph.get_vertex(node_id)
assert isinstance(node, Vertex)
assert node.id == node_id
@ -162,8 +162,8 @@ def test_get_node(basic_graph):
def test_build_nodes(basic_graph):
"""Test building nodes"""
assert len(basic_graph.nodes) == len(basic_graph._nodes)
for node in basic_graph.nodes:
assert len(basic_graph.vertices) == len(basic_graph._vertices)
for node in basic_graph.vertices:
assert isinstance(node, Vertex)
@ -172,20 +172,21 @@ def test_build_edges(basic_graph):
assert len(basic_graph.edges) == len(basic_graph._edges)
for edge in basic_graph.edges:
assert isinstance(edge, Edge)
assert isinstance(edge.source, Vertex)
assert isinstance(edge.target, Vertex)
assert isinstance(edge.source_id, str)
assert isinstance(edge.target_id, str)
def test_get_root_node(client, basic_graph, complex_graph):
def test_get_root_vertex(client, basic_graph, complex_graph):
"""Test getting root node"""
assert isinstance(basic_graph, Graph)
root = get_root_node(basic_graph)
root = get_root_vertex(basic_graph)
assert root is not None
assert isinstance(root, Vertex)
assert root.data["type"] == "TimeTravelGuideChain"
# For complex example, the root node is a ZeroShotAgent too
assert isinstance(complex_graph, Graph)
root = get_root_node(complex_graph)
root = get_root_vertex(complex_graph)
assert root is not None
assert isinstance(root, Vertex)
assert root.data["type"] == "ZeroShotAgent"
@ -221,7 +222,7 @@ def test_build_params(basic_graph):
# The matched_type attribute should be in the source_types attr
assert all(edge.matched_type in edge.source_types for edge in basic_graph.edges)
# Get the root node
root = get_root_node(basic_graph)
root = get_root_vertex(basic_graph)
# Root node is a TimeTravelGuideChain
# which requires an llm and memory
assert root is not None
@ -278,7 +279,7 @@ async def test_file_tool_node_build(client, openapi_graph):
assert Path(file_path).exists()
file_tool_node = get_node_by_type(openapi_graph, FileToolVertex)
assert file_tool_node is not UnbuiltObject
assert file_tool_node is not UnbuiltObject and file_tool_node is not None
built_object = await file_tool_node.build()
assert built_object is not UnbuiltObject
# Remove the file
@ -301,7 +302,7 @@ async def test_get_result_and_thought(basic_graph):
llm_node._built = True
langchain_object = await basic_graph.build()
# assert all nodes are built
assert all(node._built for node in basic_graph.nodes)
assert all(node._built for node in basic_graph.vertices)
# now build again and check if FakeListLLM was used
# Get the result and thought
@ -420,10 +421,12 @@ def test_update_template(sample_template, sample_nodes):
node2_updated = next((n for n in nodes_copy if n["id"] == "node2"), None)
node3_updated = next((n for n in nodes_copy if n["id"] == "node3"), None)
assert node1_updated is not None
assert node1_updated["data"]["node"]["template"]["some_field"]["show"] is True
assert node1_updated["data"]["node"]["template"]["some_field"]["advanced"] is False
assert node1_updated["data"]["node"]["template"]["some_field"]["display_name"] == "Name1"
assert node2_updated is not None
assert node2_updated["data"]["node"]["template"]["other_field"]["show"] is False
assert node2_updated["data"]["node"]["template"]["other_field"]["advanced"] is True
assert node2_updated["data"]["node"]["template"]["other_field"]["display_name"] == "DisplayName2"
@ -502,7 +505,7 @@ async def test_pickle_each_vertex(json_vector_store):
loaded_json = json.loads(json_vector_store)
graph = Graph.from_payload(loaded_json)
assert isinstance(graph, Graph)
for vertex in graph.nodes:
for vertex in graph.vertices:
await vertex.build()
pickled = pickle.dumps(vertex)
assert pickled is not UnbuiltObject

View file

@ -1,6 +1,5 @@
from unittest.mock import MagicMock, patch
from langflow.services.database.models.user.user import User
from langflow.services.settings.constants import DEFAULT_SUPERUSER, DEFAULT_SUPERUSER_PASSWORD
from langflow.services.utils import teardown_superuser
@ -104,13 +103,7 @@ def test_teardown_superuser_default_superuser(mock_get_session, mock_get_setting
teardown_superuser(mock_settings_service, mock_session)
mock_session.query.assert_called_once_with(User)
actual_expr = mock_session.query.return_value.filter.call_args[0][0]
expected_expr = User.username == DEFAULT_SUPERUSER
assert str(actual_expr) == str(expected_expr)
mock_session.delete.assert_called_once_with(mock_user)
mock_session.commit.assert_called_once()
mock_session.query.assert_not_called()
@patch("langflow.services.deps.get_settings_service")
@ -131,6 +124,6 @@ def test_teardown_superuser_no_default_superuser(mock_get_session, mock_get_sett
teardown_superuser(mock_settings_service, mock_session)
mock_session.query.assert_not_called()
mock_session.query.assert_called_once()
mock_session.delete.assert_not_called()
mock_session.commit.assert_not_called()

Some files were not shown because too many files have changed in this diff Show more