Login (#802)
### Description

This pull request introduces a significant enhancement to the project by incorporating robust authentication logic. The enhancement addresses the critical need for secure user access and data protection within the application. With this addition, the system will be able to authenticate users, verify their identities, and provide access only to authorized individuals.

### Changes Implemented

- **Authentication Middleware**: A new middleware module has been implemented to intercept incoming requests and validate user authentication. This middleware checks for valid authentication tokens or credentials and ensures that only authenticated users can access protected resources.
- **User Authentication Endpoint**: A new endpoint has been created to handle user authentication. This endpoint allows users to provide their credentials, which are then securely processed and validated against the stored user data.
- **Password Hashing**: User passwords are now securely hashed using a strong cryptographic algorithm before being stored in the database. This ensures that even if the database is compromised, user passwords remain inaccessible.
- **Token-Based Authentication**: Upon successful authentication, the system generates a time-limited access token for the user. This token must be included in subsequent requests to access protected resources. Token validity is maintained through token expiration and refresh mechanisms.
- **Authorization Checks**: The logic has been extended to include authorization checks after successful authentication. This ensures that authenticated users only have access to the resources they are authorized to use, based on their roles and permissions.

### Benefits

1. **Enhanced Security**: By implementing strong authentication mechanisms and password hashing, the project significantly improves security, minimizing the risk of unauthorized access or data breaches.
2. **User-Friendly Experience**: Users can securely access the application, confident in the knowledge that their credentials and data are protected. The token-based authentication simplifies the user experience by eliminating the need for frequent re-authentication.
3. **Scalability**: The authentication logic has been designed with scalability in mind, ensuring that the system can handle a growing number of users and requests without compromising security or performance.
4. **Modularity**: The new authentication logic has been integrated as a separate module, enhancing the project's modularity and maintainability.
This commit is contained in:
commit
02cc23d6b1
110 changed files with 4566 additions and 1804 deletions
2
.gitattributes
vendored
2
.gitattributes
vendored
|
|
@ -31,4 +31,4 @@ Dockerfile text
|
|||
*.gif binary
|
||||
*.mp4 binary
|
||||
*.svg binary
|
||||
*.csv binary
|
||||
*.csv binary
|
||||
|
|
|
|||
8
Makefile
8
Makefile
|
|
@ -19,7 +19,7 @@ coverage:
|
|||
--cov-report term-missing:skip-covered
|
||||
|
||||
tests:
|
||||
poetry run pytest tests
|
||||
poetry run pytest tests -n auto
|
||||
|
||||
format:
|
||||
poetry run black .
|
||||
|
|
@ -27,15 +27,15 @@ format:
|
|||
cd src/frontend && npm run format
|
||||
|
||||
lint:
|
||||
poetry run mypy .
|
||||
poetry run mypy --exclude .venv .
|
||||
poetry run black . --check
|
||||
poetry run ruff . --fix
|
||||
|
||||
install_frontend:
|
||||
cd src/frontend && npm install;
|
||||
cd src/frontend && npm install
|
||||
|
||||
install_frontendc:
|
||||
cd src/frontend && rm -rf node_modules package-lock.json && npm install;
|
||||
cd src/frontend && rm -rf node_modules package-lock.json && npm install
|
||||
|
||||
run_frontend:
|
||||
cd src/frontend && npm start
|
||||
|
|
|
|||
314
poetry.lock
generated
314
poetry.lock
generated
|
|
@ -703,76 +703,76 @@ click = "*"
|
|||
|
||||
[[package]]
|
||||
name = "clickhouse-connect"
|
||||
version = "0.6.9"
|
||||
version = "0.6.10"
|
||||
description = "ClickHouse Database Core Driver for Python, Pandas, and Superset"
|
||||
optional = false
|
||||
python-versions = "~=3.7"
|
||||
files = [
|
||||
{file = "clickhouse-connect-0.6.9.tar.gz", hash = "sha256:ba735bcb73c4743788e7c8bfeb865edd887da28a253bd189c449df20d9abff64"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74fb1bc9eea037db1361d75adb6482ce6d8c22e4a47a37735edd8e3862d931f7"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fb5e4a489b5960fc4bc48fdda052b3a446a736ee4be74105ae7663307da8a063"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e954bfe759fd437595732a15a40356e9e29035a83f485a23601f863aab2f7c6f"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2f4ada702a933f2ffc38e77bf948f11d745f3467191d6b6fc2190683d02bb8"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34d06a56e2be745adb692fbf1eedd9fb6d5dbb46ce325c1d9e57b53ba99eee95"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5dcbd145ad907e9bc7dbbc5e80440888201de8f1622f755032595c8b8302e4ef"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f155d1f214c81c5d8e72cbe325dfec72340c082156108db06c862ddd76771d7e"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0b47ec963b9d9cf1674490385183dc988d6e4d33287bb9d81e23373012232223"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-win32.whl", hash = "sha256:59dc14e47fa287578495835e4c5efdff90e40430b5b27a3c1453bb83d65e17cb"},
|
||||
{file = "clickhouse_connect-0.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:bcabfaa3fbef4ea9ba723d16e9f50e5e02a3c871b3afc8106b6a04a53a7b19d7"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:93025ec43ffc25ae3e5111c0da65f8227dc6ae68834beeda3b0256c22baedd9f"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac72323af794b4c79a804f4f311cfe4ae28426f92ac1b7f390aabbab6a93a4bd"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d8c1e71b34e870d8cd4805c3be86678e19a63931a718f3bf657b48da82c74df"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c286a6814facf3fa5bf492863e99b300154b33c3ecedf7799070ef9b8cd12474"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9924d53302f3333bcc40b9f5238bea4c29c107a6a82e22dc5ba24ce7c1cdb75"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:adf29b7319e5cdb9b6cb8ec3d4e85056e588ac51265b92ffaf6c69481283e643"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6fc2d4e428af2be8c9db23b37f4493848696417376f0eaba23b0e8f053f6a0d4"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3793d26cabd88bad6f4a3fe9f93974e480cc785f137158be8b58d700baaeb4f"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-win32.whl", hash = "sha256:1fa2fddd00d019b4593b194a303339268699808c5ebeab2d331b2ee0d29eabc0"},
|
||||
{file = "clickhouse_connect-0.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:536df34ecb49ddd7c61ebd6b900a7d06b3a246fbe30441cb68c568ea42e292d3"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d61337ca4c48b78a959627e9c0de58b86a8399510de184bd6d4c27b8b7e93c17"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69449ee694ed9aa4ee5f290c13ce5038efe394ff99b43eeee8e8190f3e4aa909"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97ee1de55fea90f12e1f04ea6521dd827f2ed25361cfb99374cb0649222a8f0"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adc42d8970b322bf78053a62e6c555fd8e03b29aecffe21521efaf5bad4e2ba4"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:064a27bd8d92f413ddcf20926a6c868a0482f7e757f9d412c0778b875d20c536"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a4aaf279efc5e8e13c6b4ac7ce41a3700f786dd34a58d3686ddf29660364dc62"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:022e0438728cc323d1817ade5105e4458acf71be822dffee130934140524fcbd"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-win32.whl", hash = "sha256:e7a9c511f680e0f40b8765e7fbb8bde6b101cbf0a5b180f6ccc18ad59c9776ea"},
|
||||
{file = "clickhouse_connect-0.6.9-cp37-cp37m-win_amd64.whl", hash = "sha256:c2bd8fc5767fd883d5d1ca2e35e3034e590994ea64f76b11a3814ab20862a0b0"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:159cba55d55ea85cea310a8781144559deade5c8cee6b13bc720253e6a6e4a5c"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4a34a7e3d16c0e4c34a11b7d8cbd633f22ee48e1a193f1977f64fe470ad9c79"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026b9363553a6bd22f5c9f4b65cacd3c3fc0b50d5d0159c47bb34f63dc87ccff"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaec4e9dc5110bf6f17a0abbb61e286293400d2c3450e29040fd100dc075dc89"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0224509fd36637ccf1add66f4ec12d464e2e80e4a5521e846f4d7602b6664a0c"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0aa5b9b5699ae385d4f7c1f493e1029d7803b47c74ecb2502f01fb79135253d2"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1b0418b347f4e7a27e692f6947b9788759b23ee4f4aabccbb376b5241190f8a7"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:80778a118f5b2c52daf64c051fdbc5df23a8c37e9e385205e4942fdd60925d25"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-win32.whl", hash = "sha256:841838e793cd4283d3a245e278b136ea5681e636dcdc816c27c4ff77e4bb2077"},
|
||||
{file = "clickhouse_connect-0.6.9-cp38-cp38-win_amd64.whl", hash = "sha256:63de88cf244adda961742c1e05e051340de09714b38cc33d757138af6033b364"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47216788f28fa8d1c225acdd366f5eab53c00b131ca246c7f004d94b1aff7cef"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8b6669ba538b6fe18cbe01f4451c4e5d5674471cd55aec7af3d6f1a8e064b8f"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43de9421a8f91e82efec5f9bffe366fb341e2fb7c7dce89e303061ba7065baa3"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62b184813807112a8ef2853df93b6c07899bdf04f188e547f92c54fc2e056be3"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c869d6761f38bfb940fa0992078bd7d8ece1c1c47a330ab3e8390ca8b2ba980"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b83f62002ae8a19102f05868639217fe3dddabf4c099dc0ddf1586901f1501fb"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:442f9119a223c4f97608d3e4f38debc9c62873e0e9dc948cb1b21691be35af55"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cef80d7596887eff39256e29a31c23297a1a65aac735b3e0e323c702dd95d2ee"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-win32.whl", hash = "sha256:f9cdccf6fa12349e4c25d099bed1d80274104cc4e4a8110f6692e553f4491b99"},
|
||||
{file = "clickhouse_connect-0.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:067b5b104e0ac1e16255313766cd97cccd06285ab36f7a2bea7960bd643f5c13"},
|
||||
{file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e08de57aeb1272897208f91873511090d3f904fede4e509f9b2a0ff00db4d49"},
|
||||
{file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5318b4ac3fbf91ed3ab766bfa55b5ce72ba520181ad6f61fbf37ed63150033f4"},
|
||||
{file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df27dfab1565dce2dad8d6ff41a9bc35fb52ed56fba857f26faffac05ffe201"},
|
||||
{file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71319fde3214e2382bc7ec402268b9b8c320a2eb86ea764e79a29f7562de06fd"},
|
||||
{file = "clickhouse_connect-0.6.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7e0910ac23e7c1ba9af00818f52cdfc812210b4ab10c2be54f1f6456e144e0a7"},
|
||||
{file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8566b0b57537b570bbef6736d461108c2354e40bd1b7166f9f51cc2e0d8124cb"},
|
||||
{file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7a0f9c9dcb3bb482043a2b0e3ceb371b17d7ad320317529c2834960649fd20"},
|
||||
{file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70ae9442b369fa6365018ee92cda1fa26eade3b87640c744b8e3d327872cd8ae"},
|
||||
{file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c018fcf03428f940dfa52eea7ff443dea9eee20c2e161486d0a6d2509693904"},
|
||||
{file = "clickhouse_connect-0.6.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:adb7d4a449af756ad9c5b2074ce7844ef34981827cce7510151a8a01493e68f3"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:66a1d90bcd837734efbe3060f17b98a071f773cac50183efc9a6d2dc40bdbce6"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41fbc4a75f7098d38246528af34a17b9b7411fb63914787141c82d178bc189be"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b6da9732b26cd98b8ed672bc7684ccf7a589f8d7f56faa7439f8d78b5f4c32"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c1954ba9735bf4af6737adc9da68179b6c49b698e288e839705c5c0a260ce85"},
|
||||
{file = "clickhouse_connect-0.6.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fa6b01af8c82b860fb4799651dc10a79645b88c8ee103bb14c6e8cdd8b00d8bd"},
|
||||
{file = "clickhouse-connect-0.6.10.tar.gz", hash = "sha256:5d4fc0deff7151db66670f289bb2fe714eecc75352eb1d19b3144e267b21456d"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f13409dbd1bf06e13e4d24c0e6ef40dcf8a452f26bebf1b5ef2ca096acd6c357"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e47206073d8714bc872bceebbbd0580f25cf02d3a5c3a298d19ebc6939876c5"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b0c6253e4be25c9ae65e5aafe0a35fb9d98ac8f22e41884abaeeefb37d432ed"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7c073c936672c918b440207bef0f40b691140646a3ab7a65f02d71de719dd53"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f901bca0f4a19cad08cd111a42c339ccf7682af6aae154f72454ec2032d0f422"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0214c4bc5f7aaae3e14cd67bd24f6bccd7bc97ec2e96e1ec8d69094ed3ffd399"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bdebd13aceeef172f88c6f717cbcf1546c6b916fc601547cfdf4e56a2a129191"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:535a92a2e45a9e64be6cfc1aa85b51ba66ec2cc5e753758f56bd4fdbaa9a7990"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-win32.whl", hash = "sha256:a45cf355a56aca90aca01d74d92bf8c94e1c853fc4c474ee3f8450701dbf6973"},
|
||||
{file = "clickhouse_connect-0.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:aa98d776a86495f31a7641b7f31313292a37e0765f0821e82f7c54b3ff9d1325"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66dcf77fc062fe7d8134308ad8b4ddfdbeda2be81cc84147c8090cdb8743f11d"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9bc615e5df873e11dc034af56ec7fc96e7b95d29899188098a1dcba4852542b6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a919e9d6d3e830a6dde38fd28785f3070531e468be9128bcc84ca80d0b0caa07"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab582154a8204d1e2bd309f01c6e75c7d5a66370573069d969e4d8d6596f3ee"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cc9c22bd234ad76a3f7ff5493276254e6d94dae75476ce92aebba294c52ed30"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fc13116b43a37674db3641ba325ccb7abdfc45aa4c81c0d60a04d9b03c2a4e07"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2ed439110a634348af432ba891fc53b69eeb0f456867f279b440f73ef1ee243d"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a7442ea25c7c2447c2efe7c81642f038470d65e70bf07f6d620fc9cd92459e1"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-win32.whl", hash = "sha256:e2e1248ce2b0b6dc00c04a183d0e5c5f5ff2e51d5b782d7095f7747ea38e0493"},
|
||||
{file = "clickhouse_connect-0.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:995335519d79ea692160beaa98f9717c3c14170fe43a7ff3c19470b930ec8d81"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9becc18719e393afa840dfcf04e5645d7ae413cd66ef68fb5dadf1affe4b8616"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4878ec5ef82e09401418b9dd47e228d50f3100aabc15564193430e13c3ceb6c2"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed71b85f3b8fd66a57d55732981f0d478322c59346b31c5a733c3aaae99e8218"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe2d0af7165059e20c82dc663dc8c446d844282e4fac5f9673d1c63a4db4c23f"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:00c1480cce60e58ae0857f19134c84856bc033b496baaeb920be258e306200e4"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d9450ef2ffd09d01e4485bb6b41b3cb49c92a310d7ddf3c7aaa33abb12b924db"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b7ffcd268abf59e23757cb79e9279dcdea4dcfd5ea6603d3c994e7df5e94c6b5"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-win32.whl", hash = "sha256:29dc07e8d48580b8dfa8a4c2bbb8a6343d5d2f8e64120dca854bc31c2faea45a"},
|
||||
{file = "clickhouse_connect-0.6.10-cp37-cp37m-win_amd64.whl", hash = "sha256:17e47909d02623e762d941b82237f6aa457985058b65aea4a46ceed2d7235919"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bb29eefe4c7899f85f7c675160ba973f780284a2baadf1f7ba8b09d3d6955aa9"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bf119182bdb6069d99ff70ad8d9603b888f070d7efae8792820be78333df566"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c131001f3143f8265fc5a0147590d103e26b04cf16877c045e490a48e577985"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c4cd828f669a13c1fe8179796277d0a66a647f5acf3add87d6f53dfb69a74dc"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9992ee252ca78c84b0fd9f3c1c102f9ce51be11e3908a0419fb7c44d7df6bab"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7a1c81a6cc8d46865162699c1832cbfd8af5923b8516820415e7b5d7078e5df6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a2e2e847ab5ca88761c0736fbc70b22f612406e3b3b6478907defe321415ddc"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:937c1dc1457935778909058e8527f9c1b91bfa0ef943ec4fb3b214f2b81da863"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-win32.whl", hash = "sha256:23a819007930be03bfef325742f0bfb51ce42bce7cf1adb203f879a5ccd9b16a"},
|
||||
{file = "clickhouse_connect-0.6.10-cp38-cp38-win_amd64.whl", hash = "sha256:876134ded05bee9168d2c76eef49f7719501496a2ba440778649240e906017b6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:816efe8f178fa212520a0f1f0c9f9899d03732b73aff33a875368e28bb4b8b06"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:891048c48dbcce7ff096fa0a338c763e3c2fe4768e41bcc7a27223133ba58101"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f7106ad09f736676e57987039de95817e260eeeb1cae1931e41b4362ea13b5f"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88d3edf76fb4d6ebd38c8ce4c84e7a7489c226592e9e5b8664156b3aac726a51"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1287a32fc28fd8b32401810948366c50a7a25c74074ab2f98c369804c6366743"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f2a8ac16f5750ee219960e68003a43dc25b873f0d608ad379035e9d642cd5a30"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9c36e19fd418ef41b4162fbbe658d2d579d590685c3f3db7ad55aca9cc2fb0f6"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4de6ce7bcb8da818929aaac03bb2c0c6d68d13702d79d1348d123ebf868c6917"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-win32.whl", hash = "sha256:bd498329370c3551c7dbc6013018b0521da102f63e40d554485a29e07ff57cf3"},
|
||||
{file = "clickhouse_connect-0.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:128fba3e62292e340194d271883d049f63f2fdb6fc56513f776df3fccc81cfab"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba12b89e19be14c9aeb1042ec72fa0ba0eb892e4c9d6bfb6ffc52a0e1a628fb1"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:227a6cc07c372778909c55d114b238616c46ae70d99fed4647e960aba2d189ff"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cecbee657bb902b0baa6d2b768166b967d1edba0e3ba77f22ddc8338ce588c46"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbeca9ba2529bea8ac32f8e6fa35c43f61d8c3b4883b56668c712a1065a58b2e"},
|
||||
{file = "clickhouse_connect-0.6.10-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7238fdbafdec8fb60f8a0e3e5f2237ae3ded8bad372216df82c0d94709815136"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7984d9b55927e6fbdcd774790327aa3abba2f12020caa7a422ef0ca0f95080ae"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:496cbc65916e83cb4c5175f22e678427b6aef75b582d04e73f3f6303ee674bf9"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e57182bb96fded997c526160626a1258ff050910ffa6e7eac21c6591f74f2d0"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:193b9e78a190814e36f1570466eb372e3981ce2c2c0536ef03d728ea6504af0f"},
|
||||
{file = "clickhouse_connect-0.6.10-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3e4e4aa9e83d7c73a347becba3ab5c4c7a0abde7c01d57edd0c2bb6399585946"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:56962ec499d25bd0cb33c90cdf52c9220691be30a587faf45ba346add679168b"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf77febdf4e98990bd1e775c3909f9db62fd544a759298fc8b03e72167a2375"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d773e048635223ccd974ebbcac84190682772bcf7ed89333f7a2c3f42da5cc1"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29365f01bee352e0b0ed3ef145d41c6b642d3ffc54b85c8a743c75398a752384"},
|
||||
{file = "clickhouse_connect-0.6.10-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ce55154bb1d6911e865bd95d2af935bbb1eb3d66da7b5631d6c42369383eb36d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -969,13 +969,13 @@ test-randomorder = ["pytest-randomly"]
|
|||
|
||||
[[package]]
|
||||
name = "ctransformers"
|
||||
version = "0.2.23"
|
||||
version = "0.2.24"
|
||||
description = "Python bindings for the Transformer models implemented in C/C++ using GGML library."
|
||||
optional = true
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "ctransformers-0.2.23-py3-none-any.whl", hash = "sha256:ebbba968c81a71919f9c3975b20d9b6c85e6fd5f640d1f7b86cb8d5d56c3d347"},
|
||||
{file = "ctransformers-0.2.23.tar.gz", hash = "sha256:de665dfbd529cf369059e52b277dcbdd6761a0547c8931b3cfbf89ea1eeb3d3c"},
|
||||
{file = "ctransformers-0.2.24-py3-none-any.whl", hash = "sha256:fa2ad7a38726c3ad6e57d1aff696f6e89fe3c0de5df2109b579cb6bc6c2ef599"},
|
||||
{file = "ctransformers-0.2.24.tar.gz", hash = "sha256:bb463204f557d00d533e1dc50346e0b57870cea68965ec135d3fa8db1c76ed2e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -1312,6 +1312,20 @@ files = [
|
|||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "execnet"
|
||||
version = "2.0.2"
|
||||
description = "execnet: rapid multi-Python deployment"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
|
||||
{file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
testing = ["hatch", "pre-commit", "pytest", "tox"]
|
||||
|
||||
[[package]]
|
||||
name = "executing"
|
||||
version = "1.2.0"
|
||||
|
|
@ -1688,13 +1702,13 @@ six = "*"
|
|||
|
||||
[[package]]
|
||||
name = "google-cloud-aiplatform"
|
||||
version = "1.31.0"
|
||||
version = "1.31.1"
|
||||
description = "Vertex AI API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "google-cloud-aiplatform-1.31.0.tar.gz", hash = "sha256:a5de8f5cb9bcd66db08a404cf74e7ed252d5d4038649a58f37588ccb4e2785f0"},
|
||||
{file = "google_cloud_aiplatform-1.31.0-py2.py3-none-any.whl", hash = "sha256:19429dfb6098414f758810fde1690d8e8170aff7add4281681dc61de79b4112b"},
|
||||
{file = "google-cloud-aiplatform-1.31.1.tar.gz", hash = "sha256:6de8d7d647990cc0ee601d938d3a1693e3ef50f3d54d735397b2e31ca8eeb946"},
|
||||
{file = "google_cloud_aiplatform-1.31.1-py2.py3-none-any.whl", hash = "sha256:360d95c4c6f6a27fc2a4a071741a66588f0f0ca245509315839cfa320d6862e2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -2900,39 +2914,38 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)",
|
|||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.0.256"
|
||||
version = "0.0.274"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
files = [
|
||||
{file = "langchain-0.0.256-py3-none-any.whl", hash = "sha256:3389fcb85d8d4fb16bae5ca9995d3ce634a3330f8ac1f458afc6171e4ca52de5"},
|
||||
{file = "langchain-0.0.256.tar.gz", hash = "sha256:b80115e19f86199c49bca8ef18c09d2d87548332a0144a1c5ce6a2f82e4f5f9c"},
|
||||
{file = "langchain-0.0.274-py3-none-any.whl", hash = "sha256:402e0518a2e3183498158c159cd50f7d13e948908430f682eebe2741a51ebc2a"},
|
||||
{file = "langchain-0.0.274.tar.gz", hash = "sha256:adc2cf9993765c9d241aae6079497b0f62090bebff05aa985dd92e1b10b8cacb"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = ">=3.8.3,<4.0.0"
|
||||
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
|
||||
dataclasses-json = ">=0.5.7,<0.6.0"
|
||||
langsmith = ">=0.0.11,<0.1.0"
|
||||
langsmith = ">=0.0.21,<0.1.0"
|
||||
numexpr = ">=2.8.4,<3.0.0"
|
||||
numpy = ">=1,<2"
|
||||
openapi-schema-pydantic = ">=1.2,<2.0"
|
||||
pydantic = ">=1,<2"
|
||||
pydantic = ">=1,<3"
|
||||
PyYAML = ">=5.3"
|
||||
requests = ">=2,<3"
|
||||
SQLAlchemy = ">=1.4,<3"
|
||||
tenacity = ">=8.1.0,<9.0.0"
|
||||
|
||||
[package.extras]
|
||||
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "anthropic (>=0.3,<0.4)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jina (>=3.14,<4.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=0.11.0,<0.12.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "octoai-sdk (>=0.1.1,<0.2.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", 
"sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "spacy (>=3,<4)", "steamship (>=2.16.9,<3.0.0)", "tensorflow-text (>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)", "xinference (>=0.0.6,<0.0.7)"]
|
||||
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b6)", "openai (>=0,<1)"]
|
||||
all = ["O365 (>=2.0.26,<3.0.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "amadeus (>=8.1.0)", "arxiv (>=1.4,<2.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "awadb (>=0.3.9,<0.4.0)", "azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "beautifulsoup4 (>=4,<5)", "clarifai (>=9.1.0)", "clickhouse-connect (>=0.5.14,<0.6.0)", "cohere (>=4,<5)", "deeplake (>=3.6.8,<4.0.0)", "docarray[hnswlib] (>=0.32.0,<0.33.0)", "duckduckgo-search (>=3.8.3,<4.0.0)", "elasticsearch (>=8,<9)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "google-api-python-client (==2.70.0)", "google-auth (>=2.18.1,<3.0.0)", "google-search-results (>=2,<3)", "gptcache (>=0.1.7)", "html2text (>=2020.1.16,<2021.0.0)", "huggingface_hub (>=0,<1)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lancedb (>=0.1,<0.2)", "langkit (>=0.0.6,<0.1.0)", "lark (>=1.1.5,<2.0.0)", "libdeeplake (>=0.0.60,<0.0.61)", "librosa (>=0.10.0.post2,<0.11.0)", "lxml (>=4.9.2,<5.0.0)", "manifest-ml (>=0.0.1,<0.0.2)", "marqo (>=1.2.4,<2.0.0)", "momento (>=1.5.0,<2.0.0)", "nebula3-python (>=3.4.0,<4.0.0)", "neo4j (>=5.8.1,<6.0.0)", "networkx (>=2.6.3,<3.0.0)", "nlpcloud (>=1,<2)", "nltk (>=3,<4)", "nomic (>=1.0.43,<2.0.0)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "opensearch-py (>=2.0.0,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pexpect (>=4.8.0,<5.0.0)", "pgvector (>=0.1.6,<0.2.0)", "pinecone-client (>=2,<3)", "pinecone-text (>=0.4.2,<0.5.0)", "psycopg2-binary (>=2.9.5,<3.0.0)", "pymongo (>=4.3.3,<5.0.0)", "pyowm (>=3.3.0,<4.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pytesseract (>=0.3.10,<0.4.0)", "python-arango (>=7.5.9,<8.0.0)", "pyvespa (>=0.33.0,<0.34.0)", "qdrant-client (>=1.3.1,<2.0.0)", "rdflib (>=6.3.2,<7.0.0)", "redis (>=4,<5)", "requests-toolbelt (>=1.0.0,<2.0.0)", "sentence-transformers (>=2,<3)", "singlestoredb (>=0.7.1,<0.8.0)", "tensorflow-text 
(>=2.11.0,<3.0.0)", "tigrisdb (>=1.0.0b6,<2.0.0)", "tiktoken (>=0.3.2,<0.4.0)", "torch (>=1,<3)", "transformers (>=4,<5)", "weaviate-client (>=3,<4)", "wikipedia (>=1,<2)", "wolframalpha (==5.0.0)"]
|
||||
azure = ["azure-ai-formrecognizer (>=3.2.1,<4.0.0)", "azure-ai-vision (>=0.11.1b1,<0.12.0)", "azure-cognitiveservices-speech (>=1.28.0,<2.0.0)", "azure-core (>=1.26.4,<2.0.0)", "azure-cosmos (>=4.4.0b1,<5.0.0)", "azure-identity (>=1.12.0,<2.0.0)", "azure-search-documents (==11.4.0b8)", "openai (>=0,<1)"]
|
||||
clarifai = ["clarifai (>=9.1.0)"]
|
||||
cohere = ["cohere (>=4,<5)"]
|
||||
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
|
||||
embeddings = ["sentence-transformers (>=2,<3)"]
|
||||
extended-testing = ["amazon-textract-caller (<2)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xinference (>=0.0.6,<0.0.7)", "zep-python (>=0.32)"]
|
||||
extended-testing = ["amazon-textract-caller (<2)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "openapi-schema-pydantic (>=1.2,<2.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
|
||||
javascript = ["esprima (>=4.0.1,<5.0.0)"]
|
||||
llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)", "xinference (>=0.0.6,<0.0.7)"]
|
||||
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
|
||||
openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"]
|
||||
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
|
||||
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
|
||||
|
|
@ -2978,13 +2991,13 @@ test = ["psutil", "pytest", "pytest-asyncio"]
|
|||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.0.26"
|
||||
version = "0.0.27"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
files = [
|
||||
{file = "langsmith-0.0.26-py3-none-any.whl", hash = "sha256:61c1d4582104d96edde04e1eea1dae347645b691c44489a5871341a2a1a2a1eb"},
|
||||
{file = "langsmith-0.0.26.tar.gz", hash = "sha256:80a4ef1b663a24a460d25b9986ab2010c5d06b6061c65be473abafc0647d191a"},
|
||||
{file = "langsmith-0.0.27-py3-none-any.whl", hash = "sha256:f61b07f093ba377b9af53c3d6f68fd1245f8f28605d4fc88433208aca93a5a23"},
|
||||
{file = "langsmith-0.0.27.tar.gz", hash = "sha256:c4df680ee8bf88d37f56ba196048341847c48b50ae561719c5542ef6488170e5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -3013,12 +3026,12 @@ test = ["coverage", "pytest", "pytest-cov"]
|
|||
|
||||
[[package]]
|
||||
name = "llama-cpp-python"
|
||||
version = "0.1.78"
|
||||
version = "0.1.81"
|
||||
description = "A Python wrapper for llama.cpp"
|
||||
optional = true
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "llama_cpp_python-0.1.78.tar.gz", hash = "sha256:cffdcbc4b5fca2bceb1f6bf3590460ebc898c69295a02439dfc6327566e10367"},
|
||||
{file = "llama_cpp_python-0.1.81.tar.gz", hash = "sha256:8b8fa42e41c6334efe056571b5f19056ffd9776b94ee152530e1fb9fe81deda2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -3819,20 +3832,6 @@ dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-moc
|
|||
embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"]
|
||||
wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"]
|
||||
|
||||
[[package]]
|
||||
name = "openapi-schema-pydantic"
|
||||
version = "1.2.4"
|
||||
description = "OpenAPI (v3) specification schema as pydantic class"
|
||||
optional = false
|
||||
python-versions = ">=3.6.1"
|
||||
files = [
|
||||
{file = "openapi-schema-pydantic-1.2.4.tar.gz", hash = "sha256:3e22cf58b74a69f752cc7e5f1537f6e44164282db2700cbbcd3bb99ddd065196"},
|
||||
{file = "openapi_schema_pydantic-1.2.4-py3-none-any.whl", hash = "sha256:a932ecc5dcbb308950282088956e94dea069c9823c84e507d64f6b622222098c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydantic = ">=1.8.2"
|
||||
|
||||
[[package]]
|
||||
name = "openpyxl"
|
||||
version = "3.1.2"
|
||||
|
|
@ -4504,13 +4503,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co
|
|||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.2.0"
|
||||
version = "1.3.0"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
|
||||
{file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
|
||||
{file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
|
||||
{file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
|
|
@ -5183,17 +5182,17 @@ diagrams = ["jinja2", "railroad-diagrams"]
|
|||
|
||||
[[package]]
|
||||
name = "pypdf"
|
||||
version = "3.15.2"
|
||||
version = "3.15.4"
|
||||
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "pypdf-3.15.2-py3-none-any.whl", hash = "sha256:f6e598292be34187287a609c72815c1502b3dc2c997b374ba0870ce79d2e975a"},
|
||||
{file = "pypdf-3.15.2.tar.gz", hash = "sha256:cdf7d75ebb8901f3352cf9488c5f662c6de9c52e432c429d15cada67ba372fce"},
|
||||
{file = "pypdf-3.15.4-py3-none-any.whl", hash = "sha256:791f0a52ddf390709f1f1b0c05c4d8cde13829b4f7cb91b4003b9bdd352bc944"},
|
||||
{file = "pypdf-3.15.4.tar.gz", hash = "sha256:a2780ed01dc4da23ac1542209f58fd3d951d8dd37c3c0309d123cd2f2679fb03"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
|
||||
typing_extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""}
|
||||
|
||||
[package.extras]
|
||||
crypto = ["PyCryptodome", "cryptography"]
|
||||
|
|
@ -5266,6 +5265,43 @@ pytest = ">=4.6"
|
|||
[package.extras]
|
||||
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-mock"
|
||||
version = "3.11.1"
|
||||
description = "Thin-wrapper around the mock package for easier use with pytest"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
|
||||
{file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pytest = ">=5.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["pre-commit", "pytest-asyncio", "tox"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-xdist"
|
||||
version = "3.3.1"
|
||||
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
|
||||
{file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
execnet = ">=1.1"
|
||||
pytest = ">=6.2.0"
|
||||
|
||||
[package.extras]
|
||||
psutil = ["psutil (>=3.0)"]
|
||||
setproctitle = ["setproctitle"]
|
||||
testing = ["filelock"]
|
||||
|
||||
[[package]]
|
||||
name = "python-dateutil"
|
||||
version = "2.8.2"
|
||||
|
|
@ -6527,13 +6563,13 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"]
|
|||
|
||||
[[package]]
|
||||
name = "textual"
|
||||
version = "0.34.0"
|
||||
version = "0.35.1"
|
||||
description = "Modern Text User Interface framework"
|
||||
optional = true
|
||||
python-versions = ">=3.7,<4.0"
|
||||
files = [
|
||||
{file = "textual-0.34.0-py3-none-any.whl", hash = "sha256:c695866acd8e85519eb0920cb921999ac5f58891ef7925e8b132e0eebc142e88"},
|
||||
{file = "textual-0.34.0.tar.gz", hash = "sha256:b66deee4afa9f6986c1bee973731d7dad2b169872377d238c9aad7141449b443"},
|
||||
{file = "textual-0.35.1-py3-none-any.whl", hash = "sha256:c4257ed3019cf8a2da2ac59ae59de5e66e04b95d482d065cfb3099f70fddd36f"},
|
||||
{file = "textual-0.35.1.tar.gz", hash = "sha256:70ca0bfe582f96dfa10179a9ab71329b8b15e750e26b7cee1fb4a67a981bbf36"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7253,33 +7289,33 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "watchfiles"
|
||||
version = "0.19.0"
|
||||
version = "0.20.0"
|
||||
description = "Simple, modern and high performance file watching and code reload in python."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"},
|
||||
{file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"},
|
||||
{file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"},
|
||||
{file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"},
|
||||
{file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"},
|
||||
{file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"},
|
||||
{file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"},
|
||||
{file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"},
|
||||
{file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"},
|
||||
{file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"},
|
||||
{file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:3796312bd3587e14926013612b23066912cf45a14af71cf2b20db1c12dadf4e9"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:d0002d81c89a662b595645fb684a371b98ff90a9c7d8f8630c82f0fde8310458"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:570848706440373b4cd8017f3e850ae17f76dbdf1e9045fc79023b11e1afe490"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a0351d20d03c6f7ad6b2e8a226a5efafb924c7755ee1e34f04c77c3682417fa"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:007dcc4a401093010b389c044e81172c8a2520dba257c88f8828b3d460c6bb38"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d82dbc1832da83e441d112069833eedd4cf583d983fb8dd666fbefbea9d99c0"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99f4c65fd2fce61a571b2a6fcf747d6868db0bef8a934e8ca235cc8533944d95"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5392dd327a05f538c56edb1c6ebba6af91afc81b40822452342f6da54907bbdf"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:08dc702529bb06a2b23859110c214db245455532da5eaea602921687cfcd23db"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7d4e66a857621584869cfbad87039e65dadd7119f0d9bb9dbc957e089e32c164"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-win32.whl", hash = "sha256:a03d1e6feb7966b417f43c3e3783188167fd69c2063e86bad31e62c4ea794cc5"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-win_amd64.whl", hash = "sha256:eccc8942bcdc7d638a01435d915b913255bbd66f018f1af051cd8afddb339ea3"},
|
||||
{file = "watchfiles-0.20.0-cp37-abi3-win_arm64.whl", hash = "sha256:b17d4176c49d207865630da5b59a91779468dd3e08692fe943064da260de2c7c"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d97db179f7566dcf145c5179ddb2ae2a4450e3a634eb864b09ea04e68c252e8e"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:835df2da7a5df5464c4a23b2d963e1a9d35afa422c83bf4ff4380b3114603644"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:608cd94a8767f49521901aff9ae0c92cc8f5a24d528db7d6b0295290f9d41193"},
|
||||
{file = "watchfiles-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d1de8218874925bce7bb2ae9657efc504411528930d7a83f98b1749864f2ef"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:13f995d5152a8ba4ed7c2bbbaeee4e11a5944defc7cacd0ccb4dcbdcfd78029a"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b5c8d3be7b502f8c43a33c63166ada8828dbb0c6d49c8f9ce990a96de2f5a49"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e43af4464daa08723c04b43cf978ab86cc55c684c16172622bdac64b34e36af0"},
|
||||
{file = "watchfiles-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d9e1f75c4f86c93d73b5bd1ebe667558357548f11b4f8af4e0e272f79413ce"},
|
||||
{file = "watchfiles-0.20.0.tar.gz", hash = "sha256:728575b6b94c90dd531514677201e8851708e6e4b5fe7028ac506a200b622019"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7298,13 +7334,13 @@ files = [
|
|||
|
||||
[[package]]
|
||||
name = "weaviate-client"
|
||||
version = "3.23.0"
|
||||
version = "3.23.1"
|
||||
description = "A python native Weaviate client"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "weaviate-client-3.23.0.tar.gz", hash = "sha256:3ffd7f1460c9e32755d84d4f5fc63dfc0bd990dbe2c3dc20d5c68119d467680e"},
|
||||
{file = "weaviate_client-3.23.0-py3-none-any.whl", hash = "sha256:3d3bb75c1d96b2b71e213c5eb885ae3e3f42e4304955383c467d100187d9ff8e"},
|
||||
{file = "weaviate-client-3.23.1.tar.gz", hash = "sha256:035f395fb8b17008224dc8a9ca4459b7ef4a2b0449209ac0c8d0f2e3b9a77f59"},
|
||||
{file = "weaviate_client-3.23.1-py3-none-any.whl", hash = "sha256:826b28237f7143ee4c51b988c5c37494760b4377a8536acc772a2194eb33f30c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
|
@ -7742,4 +7778,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
|
|||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<3.11"
|
||||
content-hash = "fc078c55010bf3749e684cf032a4fc64b3918b15b60b4521c17a27815518032e"
|
||||
content-hash = "505fe04c51514ef25dd955b377a00185c4a2581770af3cc84db4c47477760048"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[tool.poetry]
|
||||
name = "langflow"
|
||||
version = "0.4.14"
|
||||
version = "0.5.0a0"
|
||||
description = "A Python package with a built-in web application"
|
||||
authors = ["Logspace <contact@logspace.ai>"]
|
||||
maintainers = [
|
||||
|
|
@ -33,7 +33,7 @@ google-search-results = "^2.4.1"
|
|||
google-api-python-client = "^2.79.0"
|
||||
typer = "^0.9.0"
|
||||
gunicorn = "^21.1.0"
|
||||
langchain = "^0.0.256"
|
||||
langchain = "^0.0.274"
|
||||
openai = "^0.27.8"
|
||||
pandas = "^2.0.0"
|
||||
chromadb = "^0.3.21"
|
||||
|
|
@ -100,6 +100,8 @@ types-appdirs = "^1.4.3.5"
|
|||
types-pyyaml = "^6.0.12.8"
|
||||
types-python-jose = "^3.3.4.8"
|
||||
types-passlib = "^1.7.7.13"
|
||||
pytest-mock = "^3.11.1"
|
||||
pytest-xdist = "^3.3.1"
|
||||
|
||||
|
||||
[tool.poetry.extras]
|
||||
|
|
|
|||
|
|
@ -1,10 +1,11 @@
|
|||
import sys
|
||||
import time
|
||||
import httpx
|
||||
from langflow.services.manager import initialize_settings_manager
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.utils.util import get_number_of_workers
|
||||
from multiprocess import Process # type: ignore
|
||||
from langflow.services.database.utils import session_getter
|
||||
from langflow.services.manager import initialize_services, initialize_settings_manager
|
||||
from langflow.services.utils import get_db_manager, get_settings_manager
|
||||
|
||||
from multiprocess import Process, cpu_count # type: ignore
|
||||
import platform
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
|
@ -12,15 +13,46 @@ import socket
|
|||
from rich.panel import Panel
|
||||
from rich import box
|
||||
from rich import print as rprint
|
||||
from rich.table import Table
|
||||
import typer
|
||||
from langflow.main import setup_app
|
||||
from langflow.utils.logger import configure, logger
|
||||
import webbrowser
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
console = Console()
|
||||
|
||||
app = typer.Typer()
|
||||
|
||||
|
||||
def get_number_of_workers(workers=None):
|
||||
if workers == -1 or workers is None:
|
||||
workers = (cpu_count() * 2) + 1
|
||||
logger.debug(f"Number of workers: {workers}")
|
||||
return workers
|
||||
|
||||
|
||||
def display_results(results):
|
||||
"""
|
||||
Display the results of the migration.
|
||||
"""
|
||||
for table_results in results:
|
||||
table = Table(title=f"Migration {table_results.table_name}")
|
||||
table.add_column("Name")
|
||||
table.add_column("Type")
|
||||
table.add_column("Status")
|
||||
|
||||
for result in table_results.results:
|
||||
status = "Success" if result.success else "Failure"
|
||||
color = "green" if result.success else "red"
|
||||
table.add_row(result.name, result.type, f"[{color}]{status}[/{color}]")
|
||||
|
||||
console.print(table)
|
||||
console.print() # Print a new line
|
||||
|
||||
|
||||
def update_settings(
|
||||
config: str,
|
||||
cache: str,
|
||||
|
|
@ -94,7 +126,7 @@ def serve_on_jcloud():
|
|||
|
||||
|
||||
@app.command()
|
||||
def serve(
|
||||
def run(
|
||||
host: str = typer.Option(
|
||||
"127.0.0.1", help="Host to bind the server to.", envvar="LANGFLOW_HOST"
|
||||
),
|
||||
|
|
@ -312,6 +344,43 @@ def run_langflow(host, port, log_level, options, app):
|
|||
sys.exit(1)
|
||||
|
||||
|
||||
@app.command()
|
||||
def superuser(
|
||||
username: str = typer.Option(..., prompt=True, help="Username for the superuser."),
|
||||
password: str = typer.Option(
|
||||
..., prompt=True, hide_input=True, help="Password for the superuser."
|
||||
),
|
||||
):
|
||||
initialize_services()
|
||||
db_manager = get_db_manager()
|
||||
with session_getter(db_manager) as session:
|
||||
from langflow.services.auth.utils import create_super_user
|
||||
|
||||
if create_super_user(session, username, password):
|
||||
# Verify that the superuser was created
|
||||
from langflow.services.database.models.user.user import User
|
||||
|
||||
user = session.query(User).filter(User.username == username).first()
|
||||
if user is None:
|
||||
typer.echo("Superuser creation failed.")
|
||||
return
|
||||
|
||||
typer.echo("Superuser created successfully.")
|
||||
|
||||
else:
|
||||
typer.echo("Superuser creation failed.")
|
||||
|
||||
|
||||
@app.command()
|
||||
def migration(test: bool = typer.Option(False, help="Run migrations in test mode.")):
|
||||
initialize_services()
|
||||
db_manager = get_db_manager()
|
||||
if not test:
|
||||
db_manager.run_migrations()
|
||||
results = db_manager.run_migrations_test()
|
||||
display_results(results)
|
||||
|
||||
|
||||
def main():
|
||||
app()
|
||||
|
||||
|
|
|
|||
|
|
@ -46,6 +46,7 @@ def run_migrations_offline() -> None:
|
|||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
render_as_batch=True,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
|
|
@ -66,7 +67,9 @@ def run_migrations_online() -> None:
|
|||
)
|
||||
|
||||
with connectable.connect() as connection:
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
context.configure(
|
||||
connection=connection, target_metadata=target_metadata, render_as_batch=True
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
|
|
|||
|
|
@ -1,42 +0,0 @@
|
|||
"""Remove FlowStyles table
|
||||
|
||||
Revision ID: 0a534bdfd84b
|
||||
Revises: 4814b6f4abfd
|
||||
Create Date: 2023-08-07 14:09:06.844104
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "0a534bdfd84b"
|
||||
down_revision: Union[str, None] = "4814b6f4abfd"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table("flowstyle")
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
"flowstyle",
|
||||
sa.Column("color", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("emoji", sa.VARCHAR(), nullable=False),
|
||||
sa.Column("flow_id", sa.CHAR(length=32), nullable=True),
|
||||
sa.Column("id", sa.CHAR(length=32), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
["flow.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -0,0 +1,177 @@
|
|||
"""Adds tables
|
||||
|
||||
Revision ID: 260dbcc8b680
|
||||
Revises:
|
||||
Create Date: 2023-08-27 19:49:02.681355
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
from sqlalchemy.engine.reflection import Inspector
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "260dbcc8b680"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the ``user``, ``apikey`` and ``flow`` tables.

    The migration is written defensively: each table, column, index and
    foreign key is first checked against the live schema so the revision can
    be re-run on databases that already contain some of the objects.
    """
    # ### commands auto generated by Alembic - please adjust! ###

    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    # List existing tables
    existing_tables = inspector.get_table_names()

    # Drop the legacy 'flowstyle' table and its index, if present.
    # Fix: the index must be inspected and dropped *before* the table is
    # dropped — the previous ordering dropped the table first and then asked
    # the inspector for the indexes of a table that no longer existed.
    if "flowstyle" in existing_tables:
        if "ix_flowstyle_flow_id" in [
            index["name"] for index in inspector.get_indexes("flowstyle")
        ]:
            op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")
        op.drop_table("flowstyle")

    existing_indices_flow = []
    existing_fks_flow = []
    if "flow" in existing_tables:
        existing_indices_flow = [
            index["name"] for index in inspector.get_indexes("flow")
        ]
        # Existing foreign keys for the 'flow' table, as "table.column" strings.
        existing_fks_flow = [
            fk["referred_table"] + "." + fk["referred_columns"][0]
            for fk in inspector.get_foreign_keys("flow")
        ]

    if "user" not in existing_tables:
        op.create_table(
            "user",
            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.Column("username", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("password", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("is_active", sa.Boolean(), nullable=False),
            sa.Column("is_superuser", sa.Boolean(), nullable=False),
            # NOTE(review): "create_at" (not "created_at") — kept as-is since it
            # must match the ORM model's column name; confirm before renaming.
            sa.Column("create_at", sa.DateTime(), nullable=False),
            sa.Column("updated_at", sa.DateTime(), nullable=False),
            sa.Column("last_login_at", sa.DateTime(), nullable=True),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint("id"),
        )
        with op.batch_alter_table("user", schema=None) as batch_op:
            batch_op.create_index(
                batch_op.f("ix_user_username"), ["username"], unique=True
            )

    if "apikey" not in existing_tables:
        op.create_table(
            "apikey",
            sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
            sa.Column("created_at", sa.DateTime(), nullable=False),
            sa.Column("last_used_at", sa.DateTime(), nullable=True),
            sa.Column("total_uses", sa.Integer(), nullable=False, default=0),
            sa.Column("is_active", sa.Boolean(), nullable=False, default=True),
            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.Column("api_key", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.ForeignKeyConstraint(
                ["user_id"],
                ["user.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint("id"),
        )
        with op.batch_alter_table("apikey", schema=None) as batch_op:
            batch_op.create_index(
                batch_op.f("ix_apikey_api_key"), ["api_key"], unique=True
            )
            batch_op.create_index(batch_op.f("ix_apikey_name"), ["name"], unique=False)
            batch_op.create_index(
                batch_op.f("ix_apikey_user_id"), ["user_id"], unique=False
            )

    if "flow" not in existing_tables:
        op.create_table(
            "flow",
            sa.Column("data", sa.JSON(), nullable=True),
            sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
            sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
            sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.Column("user_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
            sa.ForeignKeyConstraint(
                ["user_id"],
                ["user.id"],
            ),
            sa.PrimaryKeyConstraint("id"),
            sa.UniqueConstraint("id"),
        )

    # Conditionally create indices for the 'flow' table.
    # If _alembic_tmp_flow exists we must drop it first: SQLite cannot
    # ROLLBACK a failed batch operation, so a stale temp table may linger.
    if "_alembic_tmp_flow" in existing_tables:
        op.drop_table("_alembic_tmp_flow")
    with op.batch_alter_table("flow", schema=None) as batch_op:
        flow_columns = [col["name"] for col in inspector.get_columns("flow")]
        if "user_id" not in flow_columns:
            batch_op.add_column(
                sa.Column(
                    "user_id",
                    sqlmodel.sql.sqltypes.GUID(),
                    nullable=True,  # should be False; NULL allowed for pre-existing rows
                )
            )
        # NOTE(review): when 'flow' was created above, existing_fks_flow is
        # empty and this adds a second FK on a table that already has one —
        # verify the batch recreate tolerates the duplicate constraint.
        if "user.id" not in existing_fks_flow:
            batch_op.create_foreign_key("fk_flow_user_id", "user", ["user_id"], ["id"])
        if "ix_flow_description" not in existing_indices_flow:
            batch_op.create_index(
                batch_op.f("ix_flow_description"), ["description"], unique=False
            )
        if "ix_flow_name" not in existing_indices_flow:
            batch_op.create_index(batch_op.f("ix_flow_name"), ["name"], unique=False)
    with op.batch_alter_table("flow", schema=None) as batch_op:
        if "ix_flow_user_id" not in existing_indices_flow:
            batch_op.create_index(
                batch_op.f("ix_flow_user_id"), ["user_id"], unique=False
            )

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop every object created by :func:`upgrade`, skipping absent tables.

    Tables are removed child-first (``flow`` and ``apikey`` before ``user``)
    so foreign-key references never dangle; each table's indexes are dropped
    via batch mode before the table itself.
    """
    # ### commands auto generated by Alembic - please adjust! ###

    bind = op.get_bind()
    schema_inspector = Inspector.from_engine(bind)
    # Tables currently present in the database.
    tables_present = schema_inspector.get_table_names()

    # Per-table index names, in the drop order used by the generated code.
    indexed_tables = {
        "flow": ("ix_flow_user_id", "ix_flow_name", "ix_flow_description"),
        "apikey": ("ix_apikey_user_id", "ix_apikey_name", "ix_apikey_api_key"),
        "user": ("ix_user_username",),
    }
    for table_name, index_names in indexed_tables.items():
        if table_name not in tables_present:
            continue
        with op.batch_alter_table(table_name, schema=None) as batch_op:
            for index_name in index_names:
                batch_op.drop_index(batch_op.f(index_name))
        op.drop_table(table_name)

    # Plain tables with no indexes to remove first.
    for table_name in ("flowstyle", "component"):
        if table_name in tables_present:
            op.drop_table(table_name)
    # ### end Alembic commands ###
|
||||
|
|
@ -1,65 +0,0 @@
|
|||
"""Add Flow table
|
||||
|
||||
Revision ID: 4814b6f4abfd
|
||||
Revises:
|
||||
Create Date: 2023-08-05 17:47:42.879824
|
||||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
import sqlmodel
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "4814b6f4abfd"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
|
||||
# This suppress is used to not break the migration if the table already exists.
|
||||
with contextlib.suppress(sa.exc.OperationalError):
|
||||
op.create_table(
|
||||
"flow",
|
||||
sa.Column("data", sa.JSON(), nullable=True),
|
||||
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_flow_description"), "flow", ["description"], unique=False
|
||||
)
|
||||
op.create_index(op.f("ix_flow_name"), "flow", ["name"], unique=False)
|
||||
with contextlib.suppress(sa.exc.OperationalError):
|
||||
op.create_table(
|
||||
"flowstyle",
|
||||
sa.Column("color", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("emoji", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||
sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True),
|
||||
sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
["flow_id"],
|
||||
["flow.id"],
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
sa.UniqueConstraint("id"),
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table("flowstyle")
|
||||
op.drop_index(op.f("ix_flow_name"), table_name="flow")
|
||||
op.drop_index(op.f("ix_flow_description"), table_name="flow")
|
||||
op.drop_table("flow")
|
||||
# ### end Alembic commands ###
|
||||
|
|
@ -6,6 +6,9 @@ from langflow.api.v1 import (
|
|||
validate_router,
|
||||
flows_router,
|
||||
component_router,
|
||||
users_router,
|
||||
api_key_router,
|
||||
login_router,
|
||||
)
|
||||
|
||||
router = APIRouter(
|
||||
|
|
@ -16,3 +19,6 @@ router.include_router(endpoints_router)
|
|||
router.include_router(validate_router)
|
||||
router.include_router(component_router)
|
||||
router.include_router(flows_router)
|
||||
router.include_router(users_router)
|
||||
router.include_router(api_key_router)
|
||||
router.include_router(login_router)
|
||||
|
|
|
|||
|
|
@ -3,6 +3,9 @@ from langflow.api.v1.validate import router as validate_router
|
|||
from langflow.api.v1.chat import router as chat_router
|
||||
from langflow.api.v1.flows import router as flows_router
|
||||
from langflow.api.v1.components import router as component_router
|
||||
from langflow.api.v1.users import router as users_router
|
||||
from langflow.api.v1.api_key import router as api_key_router
|
||||
from langflow.api.v1.login import router as login_router
|
||||
|
||||
__all__ = [
|
||||
"chat_router",
|
||||
|
|
@ -10,4 +13,7 @@ __all__ = [
|
|||
"component_router",
|
||||
"validate_router",
|
||||
"flows_router",
|
||||
"users_router",
|
||||
"api_key_router",
|
||||
"login_router",
|
||||
]
|
||||
|
|
|
|||
61
src/backend/langflow/api/v1/api_key.py
Normal file
61
src/backend/langflow/api/v1/api_key.py
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
from uuid import UUID
|
||||
from fastapi import APIRouter, HTTPException, Depends
|
||||
from langflow.api.v1.schemas import ApiKeysResponse
|
||||
from langflow.services.auth.utils import get_current_active_user
|
||||
from langflow.services.database.models.api_key.api_key import (
|
||||
ApiKeyCreate,
|
||||
UnmaskedApiKeyRead,
|
||||
)
|
||||
|
||||
# Assuming you have these methods in your service layer
|
||||
from langflow.services.database.models.api_key.crud import (
|
||||
get_api_keys,
|
||||
create_api_key,
|
||||
delete_api_key,
|
||||
)
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_session
|
||||
from sqlmodel import Session
|
||||
|
||||
|
||||
router = APIRouter(tags=["APIKey"], prefix="/api_key")
|
||||
|
||||
|
||||
@router.get("/", response_model=ApiKeysResponse)
|
||||
def get_api_keys_route(
|
||||
db: Session = Depends(get_session),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
try:
|
||||
user_id = current_user.id
|
||||
keys = get_api_keys(db, user_id)
|
||||
|
||||
return ApiKeysResponse(total_count=len(keys), user_id=user_id, api_keys=keys)
|
||||
except Exception as exc:
|
||||
raise HTTPException(status_code=400, detail=str(exc)) from exc
|
||||
|
||||
|
||||
@router.post("/", response_model=UnmaskedApiKeyRead)
|
||||
def create_api_key_route(
|
||||
req: ApiKeyCreate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
db: Session = Depends(get_session),
|
||||
):
|
||||
try:
|
||||
user_id = current_user.id
|
||||
return create_api_key(db, req, user_id=user_id)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
||||
|
||||
@router.delete("/{api_key_id}")
|
||||
def delete_api_key_route(
|
||||
api_key_id: UUID,
|
||||
current_user=Depends(get_current_active_user),
|
||||
db: Session = Depends(get_session),
|
||||
):
|
||||
try:
|
||||
delete_api_key(db, api_key_id)
|
||||
return {"detail": "API Key deleted"}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
|
@ -1,12 +1,23 @@
|
|||
from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status
|
||||
from fastapi import (
|
||||
APIRouter,
|
||||
Depends,
|
||||
HTTPException,
|
||||
Query,
|
||||
WebSocket,
|
||||
WebSocketException,
|
||||
status,
|
||||
)
|
||||
from fastapi.responses import StreamingResponse
|
||||
from langflow.api.utils import build_input_keys_response
|
||||
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
|
||||
|
||||
from langflow.services import service_manager, ServiceType
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.services.auth.utils import get_current_active_user, get_current_user
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.utils.logger import logger
|
||||
from cachetools import LRUCache
|
||||
from sqlmodel import Session
|
||||
|
||||
router = APIRouter(tags=["Chat"])
|
||||
|
||||
|
|
@ -14,9 +25,17 @@ flow_data_store: LRUCache = LRUCache(maxsize=10)
|
|||
|
||||
|
||||
@router.websocket("/chat/{client_id}")
|
||||
async def chat(client_id: str, websocket: WebSocket):
|
||||
async def chat(
|
||||
client_id: str,
|
||||
websocket: WebSocket,
|
||||
token: str = Query(...),
|
||||
db: Session = Depends(get_session),
|
||||
):
|
||||
"""Websocket endpoint for chat."""
|
||||
try:
|
||||
user = await get_current_user(token, db)
|
||||
if not user.is_active:
|
||||
raise HTTPException(status_code=401, detail="Invalid token")
|
||||
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
|
||||
if client_id in chat_manager.in_memory_cache:
|
||||
await chat_manager.handle_websocket(client_id, websocket)
|
||||
|
|
@ -32,7 +51,9 @@ async def chat(client_id: str, websocket: WebSocket):
|
|||
|
||||
|
||||
@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201)
|
||||
async def init_build(graph_data: dict, flow_id: str):
|
||||
async def init_build(
|
||||
graph_data: dict, flow_id: str, current_user=Depends(get_current_active_user)
|
||||
):
|
||||
"""Initialize the build by storing graph data and returning a unique session ID."""
|
||||
|
||||
try:
|
||||
|
|
@ -54,6 +75,7 @@ async def init_build(graph_data: dict, flow_id: str):
|
|||
flow_data_store[flow_id] = {
|
||||
"graph_data": graph_data,
|
||||
"status": BuildStatus.STARTED,
|
||||
"user_id": current_user.id,
|
||||
}
|
||||
|
||||
return InitResponse(flowId=flow_id)
|
||||
|
|
@ -99,6 +121,7 @@ async def stream_build(flow_id: str):
|
|||
return
|
||||
|
||||
graph_data = flow_data_store[flow_id].get("graph_data")
|
||||
user_id = flow_data_store[flow_id]["user_id"]
|
||||
|
||||
if not graph_data:
|
||||
error_message = "No data provided"
|
||||
|
|
@ -119,7 +142,7 @@ async def stream_build(flow_id: str):
|
|||
"log": f"Building node {vertex.vertex_type}",
|
||||
}
|
||||
yield str(StreamData(event="log", data=log_dict))
|
||||
vertex.build()
|
||||
vertex.build(user_id)
|
||||
params = vertex._built_object_repr()
|
||||
valid = True
|
||||
logger.debug(f"Building node {str(vertex.vertex_type)}")
|
||||
|
|
|
|||
|
|
@ -1,13 +1,15 @@
|
|||
from http import HTTPStatus
|
||||
from typing import Annotated, Optional, Union
|
||||
from typing import Annotated, Any, Optional, Union
|
||||
from langflow.services.auth.utils import api_key_security, get_current_active_user
|
||||
|
||||
from langflow.services.cache.utils import save_uploaded_file
|
||||
from langflow.services.database.models.flow import Flow
|
||||
from langflow.processing.process import process_graph_cached, process_tweaks
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from langflow.utils.logger import logger
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status
|
||||
import sqlalchemy as sa
|
||||
from langflow.interface.custom.custom_component import CustomComponent
|
||||
|
||||
|
||||
|
|
@ -33,12 +35,12 @@ router = APIRouter(tags=["Base"])
|
|||
|
||||
|
||||
@router.get("/all")
|
||||
def get_all():
|
||||
def get_all(current_user: User = Depends(get_current_active_user)):
|
||||
logger.debug("Building langchain types dict")
|
||||
native_components = build_langchain_types_dict()
|
||||
# custom_components is a list of dicts
|
||||
# need to merge all the keys into one dict
|
||||
custom_components_from_file = {}
|
||||
custom_components_from_file: dict[str, Any] = {}
|
||||
settings_manager = get_settings_manager()
|
||||
if settings_manager.settings.COMPONENTS_PATH:
|
||||
logger.info(
|
||||
|
|
@ -58,8 +60,12 @@ def get_all():
|
|||
|
||||
logger.info(f"Loading {len(custom_component_dicts)} category(ies)")
|
||||
for custom_component_dict in custom_component_dicts:
|
||||
logger.debug(
|
||||
{key: len(value) for key, value in custom_component_dict.items()}
|
||||
# custom_component_dict is a dict of dicts
|
||||
if not custom_component_dict:
|
||||
continue
|
||||
category = list(custom_component_dict.keys())[0]
|
||||
logger.info(
|
||||
f"Loading {len(custom_component_dict[category])} component(s) from category {category}"
|
||||
)
|
||||
custom_components_from_file = merge_nested_dicts_with_renaming(
|
||||
custom_components_from_file, custom_component_dict
|
||||
|
|
@ -71,22 +77,42 @@ def get_all():
|
|||
|
||||
|
||||
# For backwards compatibility we will keep the old endpoint
|
||||
@router.post("/predict/{flow_id}", response_model=ProcessResponse)
|
||||
@router.post("/process/{flow_id}", response_model=ProcessResponse)
|
||||
@router.post(
|
||||
"/predict/{flow_id}",
|
||||
response_model=ProcessResponse,
|
||||
dependencies=[Depends(api_key_security)],
|
||||
)
|
||||
@router.post(
|
||||
"/process/{flow_id}",
|
||||
response_model=ProcessResponse,
|
||||
)
|
||||
async def process_flow(
|
||||
session: Annotated[Session, Depends(get_session)],
|
||||
flow_id: str,
|
||||
inputs: Optional[dict] = None,
|
||||
tweaks: Optional[dict] = None,
|
||||
clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821
|
||||
session_id: Annotated[Union[None, str], Body(embed=True)] = None, # noqa: F821
|
||||
session: Session = Depends(get_session),
|
||||
api_key_user: User = Depends(api_key_security),
|
||||
):
|
||||
"""
|
||||
Endpoint to process an input with a given flow_id.
|
||||
"""
|
||||
|
||||
try:
|
||||
flow = session.get(Flow, flow_id)
|
||||
if api_key_user is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid API Key",
|
||||
)
|
||||
|
||||
# Get the flow that matches the flow_id and belongs to the user
|
||||
flow = (
|
||||
session.query(Flow)
|
||||
.filter(Flow.id == flow_id)
|
||||
.filter(Flow.user_id == api_key_user.id)
|
||||
.first()
|
||||
)
|
||||
if flow is None:
|
||||
raise ValueError(f"Flow {flow_id} not found")
|
||||
|
||||
|
|
@ -102,6 +128,22 @@ async def process_flow(
|
|||
graph_data, inputs, clear_cache, session_id
|
||||
)
|
||||
return ProcessResponse(result=response, session_id=session_id)
|
||||
except sa.exc.StatementError as exc:
|
||||
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
|
||||
if "badly formed hexadecimal UUID string" in str(exc):
|
||||
# This means the Flow ID is not a valid UUID which means it can't find the flow
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||
) from exc
|
||||
except ValueError as exc:
|
||||
if f"Flow {flow_id} not found" in str(exc):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)
|
||||
) from exc
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)
|
||||
) from exc
|
||||
except Exception as e:
|
||||
# Log stack trace
|
||||
logger.exception(e)
|
||||
|
|
|
|||
|
|
@ -4,16 +4,18 @@ from fastapi.encoders import jsonable_encoder
|
|||
|
||||
from langflow.api.utils import remove_api_keys
|
||||
from langflow.api.v1.schemas import FlowListCreate, FlowListRead
|
||||
from langflow.services.auth.utils import get_current_active_user
|
||||
from langflow.services.database.models.flow import (
|
||||
Flow,
|
||||
FlowCreate,
|
||||
FlowRead,
|
||||
FlowUpdate,
|
||||
)
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.utils import get_settings_manager
|
||||
import orjson
|
||||
from sqlmodel import Session, select
|
||||
from sqlmodel import Session
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from fastapi import File, UploadFile
|
||||
|
|
@ -23,9 +25,18 @@ router = APIRouter(prefix="/flows", tags=["Flows"])
|
|||
|
||||
|
||||
@router.post("/", response_model=FlowRead, status_code=201)
|
||||
def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
|
||||
def create_flow(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow: FlowCreate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Create a new flow."""
|
||||
if flow.user_id is None:
|
||||
flow.user_id = current_user.id
|
||||
|
||||
db_flow = Flow.from_orm(flow)
|
||||
|
||||
session.add(db_flow)
|
||||
session.commit()
|
||||
session.refresh(db_flow)
|
||||
|
|
@ -33,31 +44,49 @@ def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate):
|
|||
|
||||
|
||||
@router.get("/", response_model=list[FlowRead], status_code=200)
|
||||
def read_flows(*, session: Session = Depends(get_session)):
|
||||
def read_flows(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Read all flows."""
|
||||
try:
|
||||
flows = session.exec(select(Flow)).all()
|
||||
flows = current_user.flows
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
return [jsonable_encoder(flow) for flow in flows]
|
||||
|
||||
|
||||
@router.get("/{flow_id}", response_model=FlowRead, status_code=200)
|
||||
def read_flow(*, session: Session = Depends(get_session), flow_id: UUID):
|
||||
def read_flow(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_id: UUID,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Read a flow."""
|
||||
if flow := session.get(Flow, flow_id):
|
||||
return flow
|
||||
if user_flow := (
|
||||
session.query(Flow)
|
||||
.filter(Flow.id == flow_id)
|
||||
.filter(Flow.user_id == current_user.id)
|
||||
.first()
|
||||
):
|
||||
return user_flow
|
||||
else:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
|
||||
|
||||
@router.patch("/{flow_id}", response_model=FlowRead, status_code=200)
|
||||
def update_flow(
|
||||
*, session: Session = Depends(get_session), flow_id: UUID, flow: FlowUpdate
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_id: UUID,
|
||||
flow: FlowUpdate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Update a flow."""
|
||||
|
||||
db_flow = session.get(Flow, flow_id)
|
||||
db_flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
|
||||
if not db_flow:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
flow_data = flow.dict(exclude_unset=True)
|
||||
|
|
@ -65,7 +94,8 @@ def update_flow(
|
|||
if settings_manager.settings.REMOVE_API_KEYS:
|
||||
flow_data = remove_api_keys(flow_data)
|
||||
for key, value in flow_data.items():
|
||||
setattr(db_flow, key, value)
|
||||
if value is not None:
|
||||
setattr(db_flow, key, value)
|
||||
session.add(db_flow)
|
||||
session.commit()
|
||||
session.refresh(db_flow)
|
||||
|
|
@ -73,9 +103,14 @@ def update_flow(
|
|||
|
||||
|
||||
@router.delete("/{flow_id}", status_code=200)
|
||||
def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
|
||||
def delete_flow(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_id: UUID,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Delete a flow."""
|
||||
flow = session.get(Flow, flow_id)
|
||||
flow = read_flow(session=session, flow_id=flow_id, current_user=current_user)
|
||||
if not flow:
|
||||
raise HTTPException(status_code=404, detail="Flow not found")
|
||||
session.delete(flow)
|
||||
|
|
@ -87,10 +122,16 @@ def delete_flow(*, session: Session = Depends(get_session), flow_id: UUID):
|
|||
|
||||
|
||||
@router.post("/batch/", response_model=List[FlowRead], status_code=201)
|
||||
def create_flows(*, session: Session = Depends(get_session), flow_list: FlowListCreate):
|
||||
def create_flows(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
flow_list: FlowListCreate,
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Create multiple new flows."""
|
||||
db_flows = []
|
||||
for flow in flow_list.flows:
|
||||
flow.user_id = current_user.id
|
||||
db_flow = Flow.from_orm(flow)
|
||||
session.add(db_flow)
|
||||
db_flows.append(db_flow)
|
||||
|
|
@ -102,7 +143,10 @@ def create_flows(*, session: Session = Depends(get_session), flow_list: FlowList
|
|||
|
||||
@router.post("/upload/", response_model=List[FlowRead], status_code=201)
|
||||
async def upload_file(
|
||||
*, session: Session = Depends(get_session), file: UploadFile = File(...)
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
file: UploadFile = File(...),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Upload flows from a file."""
|
||||
contents = await file.read()
|
||||
|
|
@ -111,11 +155,19 @@ async def upload_file(
|
|||
flow_list = FlowListCreate(**data)
|
||||
else:
|
||||
flow_list = FlowListCreate(flows=[FlowCreate(**flow) for flow in data])
|
||||
return create_flows(session=session, flow_list=flow_list)
|
||||
# Now we set the user_id for all flows
|
||||
for flow in flow_list.flows:
|
||||
flow.user_id = current_user.id
|
||||
|
||||
return create_flows(session=session, flow_list=flow_list, current_user=current_user)
|
||||
|
||||
|
||||
@router.get("/download/", response_model=FlowListRead, status_code=200)
|
||||
async def download_file(*, session: Session = Depends(get_session)):
|
||||
async def download_file(
|
||||
*,
|
||||
session: Session = Depends(get_session),
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
):
|
||||
"""Download all flows as a file."""
|
||||
flows = read_flows(session=session)
|
||||
flows = read_flows(session=session, current_user=current_user)
|
||||
return FlowListRead(flows=flows)
|
||||
|
|
|
|||
|
|
@ -1,20 +1,20 @@
|
|||
from uuid import UUID
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlmodel import Session
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordRequestForm
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.database.models.token import Token
|
||||
from langflow.auth.auth import (
|
||||
from langflow.api.v1.schemas import Token
|
||||
from langflow.services.auth.utils import (
|
||||
authenticate_user,
|
||||
create_user_tokens,
|
||||
create_refresh_token,
|
||||
create_user_longterm_token,
|
||||
get_current_active_user,
|
||||
)
|
||||
|
||||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
router = APIRouter()
|
||||
router = APIRouter(tags=["Login"])
|
||||
|
||||
|
||||
@router.post("/login", response_model=Token)
|
||||
|
|
@ -37,9 +37,8 @@ async def login_to_get_access_token(
|
|||
async def auto_login(db: Session = Depends(get_session)):
|
||||
settings_manager = get_settings_manager()
|
||||
|
||||
if settings_manager.settings.AUTO_LOGIN:
|
||||
user_id = UUID("3fa85f64-5717-4562-b3fc-2c963f66afa6")
|
||||
return create_user_longterm_token(user_id, db)
|
||||
if settings_manager.auth_settings.AUTO_LOGIN:
|
||||
return create_user_longterm_token(db)
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
|
|
@ -51,7 +50,9 @@ async def auto_login(db: Session = Depends(get_session)):
|
|||
|
||||
|
||||
@router.post("/refresh")
|
||||
async def refresh_token(token: str):
|
||||
async def refresh_token(
|
||||
token: str, current_user: Session = Depends(get_current_active_user)
|
||||
):
|
||||
if token:
|
||||
return create_refresh_token(token)
|
||||
else:
|
||||
|
|
@ -1,7 +1,10 @@
|
|||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from uuid import UUID
|
||||
from langflow.services.database.models.api_key.api_key import ApiKeyRead
|
||||
from langflow.services.database.models.flow import FlowCreate, FlowRead
|
||||
from langflow.services.database.models.user import UserRead
|
||||
from langflow.services.database.models.base import orjson_dumps
|
||||
|
||||
from pydantic import BaseModel, Field, validator
|
||||
|
|
@ -137,3 +140,32 @@ class ComponentListCreate(BaseModel):
|
|||
|
||||
class ComponentListRead(BaseModel):
|
||||
flows: List[FlowRead]
|
||||
|
||||
|
||||
class UsersResponse(BaseModel):
    """Response payload for the user-listing endpoint."""

    total_count: int  # presumably the total user count, not len(users) — verify against caller
    users: List[UserRead]
|
||||
|
||||
|
||||
class ApiKeyResponse(BaseModel):
    """A single API key as serialized for the client."""

    id: str
    api_key: str
    name: str
    created_at: str
    # NOTE(review): declared non-optional although the migration makes the
    # last_used_at column nullable — confirm None never reaches this model.
    last_used_at: str
|
||||
|
||||
|
||||
class ApiKeysResponse(BaseModel):
    """Response payload listing a user's API keys."""

    total_count: int  # set to len(api_keys) by the route handler
    user_id: UUID
    api_keys: List[ApiKeyRead]
|
||||
|
||||
|
||||
class CreateApiKeyRequest(BaseModel):
    """Request body for creating a new API key."""

    name: str
|
||||
|
||||
|
||||
class Token(BaseModel):
    """OAuth2-style token pair returned by the login endpoints."""

    access_token: str
    refresh_token: str
    token_type: str  # e.g. "bearer" — presumably; verify against token creation
|
||||
|
|
|
|||
|
|
@ -1,4 +1,11 @@
|
|||
from uuid import UUID
|
||||
from langflow.api.v1.schemas import UsersResponse
|
||||
from langflow.services.database.models.user import (
|
||||
User,
|
||||
UserCreate,
|
||||
UserRead,
|
||||
UserUpdate,
|
||||
)
|
||||
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
|
@ -7,28 +14,27 @@ from sqlmodel import Session, select
|
|||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.auth.auth import get_current_active_user, get_password_hash
|
||||
from langflow.database.models.user import (
|
||||
User,
|
||||
UserAddModel,
|
||||
UserListModel,
|
||||
UserPatchModel,
|
||||
UsersResponse,
|
||||
from langflow.services.auth.utils import (
|
||||
get_current_active_superuser,
|
||||
get_current_active_user,
|
||||
get_password_hash,
|
||||
)
|
||||
from langflow.services.database.models.user.crud import (
|
||||
update_user,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["Login"])
|
||||
router = APIRouter(tags=["Users"])
|
||||
|
||||
|
||||
@router.post("/user", response_model=UserListModel)
|
||||
@router.post("/user", response_model=UserRead, status_code=201)
|
||||
def add_user(
|
||||
user: UserAddModel,
|
||||
user: UserCreate,
|
||||
db: Session = Depends(get_session),
|
||||
) -> User:
|
||||
"""
|
||||
Add a new user to the database.
|
||||
"""
|
||||
new_user = User(**user.dict())
|
||||
new_user = User.from_orm(user)
|
||||
try:
|
||||
new_user.password = get_password_hash(user.password)
|
||||
|
||||
|
|
@ -37,13 +43,15 @@ def add_user(
|
|||
db.refresh(new_user)
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=400, detail="User exists") from e
|
||||
raise HTTPException(status_code=400, detail="This username is unavailable.") from e
|
||||
|
||||
return new_user
|
||||
|
||||
|
||||
@router.get("/user", response_model=UserListModel)
|
||||
def read_current_user(current_user: User = Depends(get_current_active_user)) -> User:
|
||||
@router.get("/user", response_model=UserRead)
|
||||
def read_current_user(
|
||||
current_user: User = Depends(get_current_active_user),
|
||||
) -> User:
|
||||
"""
|
||||
Retrieve the current user's data.
|
||||
"""
|
||||
|
|
@ -54,7 +62,7 @@ def read_current_user(current_user: User = Depends(get_current_active_user)) ->
|
|||
def read_all_users(
|
||||
skip: int = 0,
|
||||
limit: int = 10,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
current_user: Session = Depends(get_current_active_superuser),
|
||||
db: Session = Depends(get_session),
|
||||
) -> UsersResponse:
|
||||
"""
|
||||
|
|
@ -68,14 +76,14 @@ def read_all_users(
|
|||
|
||||
return UsersResponse(
|
||||
total_count=total_count, # type: ignore
|
||||
users=[UserListModel(**dict(user.User)) for user in users],
|
||||
users=[UserRead(**dict(user.User)) for user in users],
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/user/{user_id}", response_model=UserListModel)
|
||||
@router.patch("/user/{user_id}", response_model=UserRead)
|
||||
def patch_user(
|
||||
user_id: UUID,
|
||||
user: UserPatchModel,
|
||||
user: UserUpdate,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
db: Session = Depends(get_session),
|
||||
) -> User:
|
||||
|
|
@ -88,12 +96,21 @@ def patch_user(
|
|||
@router.delete("/user/{user_id}")
|
||||
def delete_user(
|
||||
user_id: UUID,
|
||||
_: Session = Depends(get_current_active_user),
|
||||
current_user: User = Depends(get_current_active_superuser),
|
||||
db: Session = Depends(get_session),
|
||||
) -> dict:
|
||||
"""
|
||||
Delete a user from the database.
|
||||
"""
|
||||
if current_user.id == user_id:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="You can't delete your own user account"
|
||||
)
|
||||
elif not current_user.is_superuser:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="You don't have the permission to delete this user"
|
||||
)
|
||||
|
||||
user_db = db.query(User).filter(User.id == user_id).first()
|
||||
if not user_db:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
|
@ -115,14 +132,13 @@ def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev(
|
|||
"""
|
||||
new_user = User(
|
||||
username="superuser",
|
||||
password="12345",
|
||||
password=get_password_hash("12345"),
|
||||
is_active=True,
|
||||
is_superuser=True,
|
||||
last_login_at=None,
|
||||
)
|
||||
|
||||
try:
|
||||
new_user.password = get_password_hash(new_user.password)
|
||||
db.add(new_user)
|
||||
db.commit()
|
||||
db.refresh(new_user)
|
||||
|
|
@ -1,177 +0,0 @@
|
|||
from uuid import UUID
|
||||
from typing import Annotated
|
||||
from jose import JWTError, jwt
|
||||
from sqlalchemy.orm import Session
|
||||
from passlib.context import CryptContext
|
||||
from fastapi.security import OAuth2PasswordBearer
|
||||
from fastapi import Depends, HTTPException, status
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from langflow.services.utils import get_settings_manager
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.database.models.user import (
|
||||
User,
|
||||
get_user_by_id,
|
||||
get_user_by_username,
|
||||
update_user_last_login_at,
|
||||
)
|
||||
|
||||
|
||||
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login")
|
||||
|
||||
|
||||
async def get_current_user(
    token: Annotated[str, Depends(oauth2_scheme)], db: Session = Depends(get_session)
) -> User:
    """Resolve the authenticated user from a JWT bearer access token.

    Raises HTTP 401 when the token is malformed or expired, when it
    carries a "type" claim (i.e. it is a refresh token, not an access
    token), or when its subject does not map to a known user.
    """
    settings_manager = get_settings_manager()

    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    try:
        payload = jwt.decode(
            token,
            settings_manager.settings.SECRET_KEY,
            algorithms=[settings_manager.settings.ALGORITHM],
        )
        user_id: UUID = payload.get("sub")  # type: ignore
        token_type: str = payload.get("type")  # type: ignore

        # Access tokens carry no "type" claim; a truthy "type" (e.g. the
        # "rf" marker on refresh tokens) must be rejected here.
        if user_id is None or token_type:
            raise credentials_exception
    except JWTError as e:
        raise credentials_exception from e

    user = get_user_by_id(db, user_id)  # type: ignore
    if user is None:
        raise credentials_exception
    return user
|
||||
|
||||
|
||||
async def get_current_active_user(
    current_user: Annotated[User, Depends(get_current_user)]
):
    """Pass through the authenticated user, rejecting inactive accounts."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
|
||||
|
||||
|
||||
def verify_password(plain_password, hashed_password):
    """Check *plain_password* against its stored bcrypt *hashed_password*."""
    context = pwd_context
    return context.verify(plain_password, hashed_password)
|
||||
|
||||
|
||||
def get_password_hash(password):
    """Hash *password* with the module-wide bcrypt context."""
    hashed = pwd_context.hash(password)
    return hashed
|
||||
|
||||
|
||||
def create_token(data: dict, expires_delta: timedelta):
    """Encode *data* as a signed JWT that expires *expires_delta* from now."""
    settings = get_settings_manager().settings

    # Copy the claims so the caller's dict is never mutated.
    claims = {**data, "exp": datetime.now(timezone.utc) + expires_delta}
    return jwt.encode(claims, settings.SECRET_KEY, algorithm=settings.ALGORITHM)
|
||||
|
||||
|
||||
def create_user_longterm_token(
    user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
) -> dict:
    """Issue a one-year access token (no refresh token) for *user_id*."""
    token = create_token(
        data={"sub": str(user_id)},
        expires_delta=timedelta(days=365),
    )

    if update_last_login:
        # Record this token issuance as a login event.
        update_user_last_login_at(user_id, db)

    return {"access_token": token, "refresh_token": None, "token_type": "bearer"}
|
||||
|
||||
|
||||
def create_user_tokens(
    user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
) -> dict:
    """Issue a fresh access/refresh token pair for *user_id*."""
    settings = get_settings_manager().settings
    subject = str(user_id)

    access_token = create_token(
        data={"sub": subject},
        expires_delta=timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES),
    )
    # Refresh tokens are tagged with type "rf" so they cannot be replayed
    # as access tokens.
    refresh_token = create_token(
        data={"sub": subject, "type": "rf"},
        expires_delta=timedelta(minutes=settings.REFRESH_TOKEN_EXPIRE_MINUTES),
    )

    if update_last_login:
        # Record the successful login timestamp.
        update_user_last_login_at(user_id, db)

    return {
        "access_token": access_token,
        "refresh_token": refresh_token,
        "token_type": "bearer",
    }
|
||||
|
||||
|
||||
def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)):
    """Exchange a valid refresh token for a new access/refresh token pair.

    Raises HTTP 401 when the token is malformed, expired, or is not an
    actual refresh token (i.e. its "type" claim is not "rf").
    """
    settings_manager = get_settings_manager()

    try:
        payload = jwt.decode(
            refresh_token,
            settings_manager.settings.SECRET_KEY,
            algorithms=[settings_manager.settings.ALGORITHM],
        )
        user_id: UUID = payload.get("sub")  # type: ignore
        token_type: str = payload.get("type")  # type: ignore

        # Require the explicit "rf" marker set by create_user_tokens.
        # Previously any non-None "type" value was accepted, which would
        # let arbitrarily-typed tokens signed with the same key refresh.
        if user_id is None or token_type != "rf":
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token"
            )

        return create_user_tokens(user_id, db)

    except JWTError as e:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid refresh token",
        ) from e
|
||||
|
||||
|
||||
def authenticate_user(
    username: str, password: str, db: Session = Depends(get_session)
) -> User | None:
    """Return the user when *username*/*password* match, else None.

    Inactive accounts raise HTTP 400 so the client can distinguish
    "pending approval" from "bad credentials".
    """
    user = get_user_by_username(db, username)
    if user is None:
        return None

    if not user.is_active:
        # An inactive account that has never logged in is awaiting approval.
        detail = "Waiting for approval" if not user.last_login_at else "Inactive user"
        raise HTTPException(status_code=400, detail=detail)

    if verify_password(password, user.password):
        return user
    return None
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Token(BaseModel):
    """Response body returned by the login/refresh endpoints."""

    # Short-lived JWT presented on each authenticated request.
    access_token: str
    # Longer-lived JWT used to obtain a new access token.
    refresh_token: str
    # NOTE(review): presumably always "bearer" — confirm against the issuer.
    token_type: str
|
||||
|
|
@ -1,94 +0,0 @@
|
|||
from sqlmodel import Field
|
||||
from uuid import UUID, uuid4
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List
|
||||
from sqlalchemy.orm import Session
|
||||
from datetime import timezone, datetime
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from fastapi import HTTPException, Depends
|
||||
|
||||
from langflow.services.utils import get_session
|
||||
from langflow.services.database.models.base import SQLModelSerializable, SQLModel
|
||||
|
||||
|
||||
class User(SQLModelSerializable, table=True):
    """Database table for application user accounts."""

    id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
    # Login name; uniqueness enforced at the database level.
    username: str = Field(index=True, unique=True)
    # Stores the *hashed* password — callers must hash before assignment.
    password: str = Field()
    # New accounts start inactive until approved.
    is_active: bool = Field(default=False)
    is_superuser: bool = Field(default=False)
    # NOTE(review): "create_at" looks like a typo for "created_at", but
    # renaming would change the DB column name — left as-is.
    create_at: datetime = Field(default_factory=datetime.utcnow)
    updated_at: datetime = Field(default_factory=datetime.utcnow)
    # None until the user first logs in.
    last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UserAddModel(SQLModel):
    """Request payload for creating a new user."""

    username: str = Field()
    # Plain-text password from the client; hashed before persistence.
    password: str = Field()
|
||||
|
||||
|
||||
class UserListModel(SQLModel):
    """Public, password-free view of a user returned by the API."""

    id: UUID = Field(default_factory=uuid4)
    username: str = Field()
    is_active: bool = Field()
    is_superuser: bool = Field()
    # Field name mirrors the User table's "create_at" column (sic).
    create_at: datetime = Field()
    updated_at: datetime = Field()
    last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UserPatchModel(SQLModel):
    """Partial-update payload; fields left unset are not modified."""

    # NOTE(review): these use Field() with no default, which may make the
    # Optional fields effectively required — confirm whether
    # Field(default=None) is needed.
    username: Optional[str] = Field()
    is_active: Optional[bool] = Field()
    is_superuser: Optional[bool] = Field()
    last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UsersResponse(BaseModel):
    """Paginated list of users plus the total row count."""

    # Total number of users in the table (not just this page).
    total_count: int
    users: List[UserListModel]
|
||||
|
||||
|
||||
def get_user_by_username(db: Session, username: str) -> User:
    """Look up a user by login name; returns None when absent."""
    row = db.query(User).filter(User.username == username).first()
    if row:
        # Return a detached copy, not the session-bound row.
        return User.from_orm(row)
    return None  # type: ignore
|
||||
|
||||
|
||||
def get_user_by_id(db: Session, id: UUID) -> User:
    """Look up a user by primary key; returns None when absent."""
    row = db.query(User).filter(User.id == id).first()
    if row:
        # Return a detached copy, not the session-bound row.
        return User.from_orm(row)
    return None  # type: ignore
|
||||
|
||||
|
||||
def update_user(
    user_id: UUID, user: UserPatchModel, db: Session = Depends(get_session)
) -> User:
    """Apply a partial update to *user_id* and return the updated user.

    Raises HTTP 409 when the requested username belongs to another user,
    HTTP 404 when the target user does not exist, and HTTP 400 on a
    constraint violation during commit.
    """
    # Reject a username change that would collide with a different user.
    user_db = get_user_by_username(db, user.username)  # type: ignore
    if user_db and user_db.id != user_id:
        raise HTTPException(status_code=409, detail="Username already exists")

    user_db = get_user_by_id(db, user_id)
    if not user_db:
        raise HTTPException(status_code=404, detail="User not found")

    try:
        # Only fields the client actually sent are applied.
        user_data = user.dict(exclude_unset=True)
        for key, value in user_data.items():
            setattr(user_db, key, value)

        user_db.updated_at = datetime.now(timezone.utc)
        # merge() reattaches the detached copy returned by the lookup
        # helpers to this session before committing.
        user_db = db.merge(user_db)
        db.commit()
        # refresh() is only valid for instances the session still tracks.
        if db.identity_key(instance=user_db) is not None:
            db.refresh(user_db)

    except IntegrityError as e:
        db.rollback()
        raise HTTPException(status_code=400, detail=str(e)) from e

    return user_db
|
||||
|
||||
|
||||
def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)):
    """Stamp the user's last_login_at with the current UTC time."""
    now = datetime.now(timezone.utc)
    patch = UserPatchModel(last_login_at=now)  # type: ignore
    return update_user(user_id, patch, db)
|
||||
|
|
@ -144,7 +144,7 @@ class Graph:
|
|||
|
||||
return list(reversed(sorted_vertices))
|
||||
|
||||
def generator_build(self) -> Generator:
|
||||
def generator_build(self) -> Generator[Vertex, None, None]:
|
||||
"""Builds each vertex in the graph and yields it."""
|
||||
sorted_vertices = self.topological_sort()
|
||||
logger.debug("Sorted vertices: %s", sorted_vertices)
|
||||
|
|
|
|||
|
|
@ -133,13 +133,13 @@ class Vertex:
|
|||
# Add _type to params
|
||||
self.params = params
|
||||
|
||||
def _build(self):
|
||||
def _build(self, user_id=None):
|
||||
"""
|
||||
Initiate the build process.
|
||||
"""
|
||||
logger.debug(f"Building {self.vertex_type}")
|
||||
self._build_each_node_in_params_dict()
|
||||
self._get_and_instantiate_class()
|
||||
self._get_and_instantiate_class(user_id)
|
||||
self._validate_built_object()
|
||||
|
||||
self._built = True
|
||||
|
|
@ -169,23 +169,25 @@ class Vertex:
|
|||
"""
|
||||
return all(self._is_node(node) for node in value)
|
||||
|
||||
def _build_node_and_update_params(self, key, node):
|
||||
def _build_node_and_update_params(self, key, node, user_id=None):
|
||||
"""
|
||||
Builds a given node and updates the params dictionary accordingly.
|
||||
"""
|
||||
result = node.build()
|
||||
result = node.build(user_id)
|
||||
self._handle_func(key, result)
|
||||
if isinstance(result, list):
|
||||
self._extend_params_list_with_result(key, result)
|
||||
self.params[key] = result
|
||||
|
||||
def _build_list_of_nodes_and_update_params(self, key, nodes):
|
||||
def _build_list_of_nodes_and_update_params(
|
||||
self, key, nodes: List["Vertex"], user_id=None
|
||||
):
|
||||
"""
|
||||
Iterates over a list of nodes, builds each and updates the params dictionary.
|
||||
"""
|
||||
self.params[key] = []
|
||||
for node in nodes:
|
||||
built = node.build()
|
||||
built = node.build(user_id)
|
||||
if isinstance(built, list):
|
||||
if key not in self.params:
|
||||
self.params[key] = []
|
||||
|
|
@ -215,7 +217,7 @@ class Vertex:
|
|||
if isinstance(self.params[key], list):
|
||||
self.params[key].extend(result)
|
||||
|
||||
def _get_and_instantiate_class(self):
|
||||
def _get_and_instantiate_class(self, user_id=None):
|
||||
"""
|
||||
Gets the class from a dictionary and instantiates it with the params.
|
||||
"""
|
||||
|
|
@ -226,6 +228,7 @@ class Vertex:
|
|||
node_type=self.vertex_type,
|
||||
base_type=self.base_type,
|
||||
params=self.params,
|
||||
user_id=user_id,
|
||||
)
|
||||
self._update_built_object_and_artifacts(result)
|
||||
except Exception as exc:
|
||||
|
|
@ -255,9 +258,9 @@ class Vertex:
|
|||
|
||||
raise ValueError(message)
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
if not self._built or force:
|
||||
self._build()
|
||||
self._build(user_id, *args, **kwargs)
|
||||
|
||||
return self._built_object
|
||||
|
||||
|
|
|
|||
|
|
@ -21,18 +21,18 @@ class AgentVertex(Vertex):
|
|||
elif isinstance(source_node, ChainVertex):
|
||||
self.chains.append(source_node)
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
if not self._built or force:
|
||||
self._set_tools_and_chains()
|
||||
# First, build the tools
|
||||
for tool_node in self.tools:
|
||||
tool_node.build()
|
||||
tool_node.build(user_id=user_id)
|
||||
|
||||
# Next, build the chains and the rest
|
||||
for chain_node in self.chains:
|
||||
chain_node.build(tools=self.tools)
|
||||
chain_node.build(tools=self.tools, user_id=user_id)
|
||||
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
|
||||
return self._built_object
|
||||
|
||||
|
|
@ -49,13 +49,13 @@ class LLMVertex(Vertex):
|
|||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="llms")
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
# LLM is different because some models might take up too much memory
|
||||
# or time to load. So we only load them when we need them.ß
|
||||
if self.vertex_type == self.built_node_type:
|
||||
return self.class_built_object
|
||||
if not self._built or force:
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
self.built_node_type = self.vertex_type
|
||||
self.class_built_object = self._built_object
|
||||
# Avoid deepcopying the LLM
|
||||
|
|
@ -77,11 +77,11 @@ class WrapperVertex(Vertex):
|
|||
def __init__(self, data: Dict):
|
||||
super().__init__(data, base_type="wrappers")
|
||||
|
||||
def build(self, force: bool = False) -> Any:
|
||||
def build(self, force: bool = False, user_id=None, *args, **kwargs) -> Any:
|
||||
if not self._built or force:
|
||||
if "headers" in self.params:
|
||||
self.params["headers"] = ast.literal_eval(self.params["headers"])
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
return self._built_object
|
||||
|
||||
|
||||
|
|
@ -148,16 +148,19 @@ class ChainVertex(Vertex):
|
|||
def build(
|
||||
self,
|
||||
force: bool = False,
|
||||
tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
|
||||
user_id=None,
|
||||
*args,
|
||||
**kwargs,
|
||||
) -> Any:
|
||||
if not self._built or force:
|
||||
# Check if the chain requires a PromptVertex
|
||||
for key, value in self.params.items():
|
||||
if isinstance(value, PromptVertex):
|
||||
# Build the PromptVertex, passing the tools if available
|
||||
tools = kwargs.get("tools", None)
|
||||
self.params[key] = value.build(tools=tools, force=force)
|
||||
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
|
||||
return self._built_object
|
||||
|
||||
|
|
@ -169,7 +172,10 @@ class PromptVertex(Vertex):
|
|||
def build(
|
||||
self,
|
||||
force: bool = False,
|
||||
user_id=None,
|
||||
tools: Optional[List[Union[ToolkitVertex, ToolVertex]]] = None,
|
||||
*args,
|
||||
**kwargs,
|
||||
) -> Any:
|
||||
if not self._built or force:
|
||||
if (
|
||||
|
|
@ -180,7 +186,7 @@ class PromptVertex(Vertex):
|
|||
# Check if it is a ZeroShotPrompt and needs a tool
|
||||
if "ShotPrompt" in self.vertex_type:
|
||||
tools = (
|
||||
[tool_node.build() for tool_node in tools]
|
||||
[tool_node.build(user_id=user_id) for tool_node in tools]
|
||||
if tools is not None
|
||||
else []
|
||||
)
|
||||
|
|
@ -208,7 +214,7 @@ class PromptVertex(Vertex):
|
|||
else:
|
||||
self.params.pop("input_variables", None)
|
||||
|
||||
self._build()
|
||||
self._build(user_id=user_id)
|
||||
return self._built_object
|
||||
|
||||
def _built_object_repr(self):
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
from typing import Any, Callable, List, Optional
|
||||
from typing import Any, Callable, List, Optional, Union
|
||||
from uuid import UUID
|
||||
from fastapi import HTTPException
|
||||
from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
|
||||
from langflow.interface.custom.component import Component
|
||||
|
|
@ -22,6 +23,7 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
function: Optional[Callable] = None
|
||||
return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())
|
||||
repr_value: Optional[Any] = ""
|
||||
user_id: Optional[Union[UUID, str]] = None
|
||||
|
||||
def __init__(self, **data):
|
||||
super().__init__(**data)
|
||||
|
|
@ -187,11 +189,16 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
return build_sorted_vertices_with_caching(graph_data)
|
||||
|
||||
def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]:
|
||||
get_session = get_session or session_getter
|
||||
db_manager = get_db_manager()
|
||||
with get_session(db_manager) as session:
|
||||
flows = session.query(Flow).all()
|
||||
return flows
|
||||
if not self.user_id:
|
||||
raise ValueError("Session is invalid")
|
||||
try:
|
||||
get_session = get_session or session_getter
|
||||
db_manager = get_db_manager()
|
||||
with get_session(db_manager) as session:
|
||||
flows = session.query(Flow).filter(Flow.user_id == self.user_id).all()
|
||||
return flows
|
||||
except Exception as e:
|
||||
raise ValueError("Session is invalid") from e
|
||||
|
||||
def get_flow(
|
||||
self,
|
||||
|
|
@ -207,7 +214,11 @@ class CustomComponent(Component, extra=Extra.allow):
|
|||
if flow_id:
|
||||
flow = session.query(Flow).get(flow_id)
|
||||
elif flow_name:
|
||||
flow = session.query(Flow).filter(Flow.name == flow_name).first()
|
||||
flow = (
|
||||
session.query(Flow)
|
||||
.filter(Flow.name == flow_name)
|
||||
.filter(Flow.user_id == self.user_id)
|
||||
).first()
|
||||
else:
|
||||
raise ValueError("Either flow_name or flow_id must be provided")
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import json
|
||||
import orjson
|
||||
from typing import Any, Callable, Dict, Sequence, Type
|
||||
from typing import Any, Callable, Dict, Sequence, Type, TYPE_CHECKING
|
||||
|
||||
from langchain.agents import agent as agent_module
|
||||
from langchain.agents.agent import AgentExecutor
|
||||
|
|
@ -36,8 +36,13 @@ from langchain.vectorstores.base import VectorStore
|
|||
from langchain.document_loaders.base import BaseLoader
|
||||
from langflow.utils.logger import logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow import CustomComponent
|
||||
|
||||
def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
|
||||
|
||||
def instantiate_class(
|
||||
node_type: str, base_type: str, params: Dict, user_id=None
|
||||
) -> Any:
|
||||
"""Instantiate class from module type and key, and params"""
|
||||
params = convert_params_to_sets(params)
|
||||
params = convert_kwargs(params)
|
||||
|
|
@ -48,7 +53,9 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any:
|
|||
return custom_node(**params)
|
||||
logger.debug(f"Instantiating {node_type} of type {base_type}")
|
||||
class_object = import_by_type(_type=base_type, name=node_type)
|
||||
return instantiate_based_on_type(class_object, base_type, node_type, params)
|
||||
return instantiate_based_on_type(
|
||||
class_object, base_type, node_type, params, user_id=user_id
|
||||
)
|
||||
|
||||
|
||||
def convert_params_to_sets(params):
|
||||
|
|
@ -75,7 +82,7 @@ def convert_kwargs(params):
|
|||
return params
|
||||
|
||||
|
||||
def instantiate_based_on_type(class_object, base_type, node_type, params):
|
||||
def instantiate_based_on_type(class_object, base_type, node_type, params, user_id):
|
||||
if base_type == "agents":
|
||||
return instantiate_agent(node_type, class_object, params)
|
||||
elif base_type == "prompts":
|
||||
|
|
@ -109,19 +116,19 @@ def instantiate_based_on_type(class_object, base_type, node_type, params):
|
|||
elif base_type == "memory":
|
||||
return instantiate_memory(node_type, class_object, params)
|
||||
elif base_type == "custom_components":
|
||||
return instantiate_custom_component(node_type, class_object, params)
|
||||
return instantiate_custom_component(node_type, class_object, params, user_id)
|
||||
elif base_type == "wrappers":
|
||||
return instantiate_wrapper(node_type, class_object, params)
|
||||
else:
|
||||
return class_object(**params)
|
||||
|
||||
|
||||
def instantiate_custom_component(node_type, class_object, params):
|
||||
def instantiate_custom_component(node_type, class_object, params, user_id):
|
||||
# we need to make a copy of the params because we will be
|
||||
# modifying it
|
||||
params_copy = params.copy()
|
||||
class_object = get_function_custom(params_copy.pop("code"))
|
||||
custom_component = class_object()
|
||||
class_object: "CustomComponent" = get_function_custom(params_copy.pop("code"))
|
||||
custom_component = class_object(user_id=user_id)
|
||||
built_object = custom_component.build(**params_copy)
|
||||
return built_object, {"repr": custom_component.custom_repr()}
|
||||
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import inspect
|
|||
from typing import Dict, Union
|
||||
|
||||
from langchain.agents.tools import Tool
|
||||
from langflow.utils.logger import logger
|
||||
|
||||
|
||||
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
|
||||
|
|
@ -57,7 +58,13 @@ def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:
|
|||
|
||||
|
||||
def get_class_tool_params(cls, **kwargs) -> Union[Dict, None]:
|
||||
tree = ast.parse(inspect.getsource(cls))
|
||||
try:
|
||||
tree = ast.parse(inspect.getsource(cls))
|
||||
except IndentationError:
|
||||
logger.error(
|
||||
f"Error parsing class {cls.__name__}. Make sure there are no tabs in the code."
|
||||
)
|
||||
return None
|
||||
|
||||
tool_params = {}
|
||||
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ from fastapi.responses import FileResponse
|
|||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from langflow.api import router
|
||||
from langflow.routers import login, users, health
|
||||
|
||||
|
||||
from langflow.interface.utils import setup_llm_caching
|
||||
from langflow.services.database.utils import initialize_database
|
||||
|
|
@ -31,9 +31,9 @@ def create_app():
|
|||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
app.include_router(login.router)
|
||||
app.include_router(users.router)
|
||||
app.include_router(health.router)
|
||||
@app.get("/health")
|
||||
def health():
|
||||
return {"status": "ok"}
|
||||
|
||||
app.include_router(router)
|
||||
|
||||
|
|
@ -89,7 +89,7 @@ def setup_app(
|
|||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
from langflow.utils.util import get_number_of_workers
|
||||
from langflow.__main__ import get_number_of_workers
|
||||
|
||||
configure()
|
||||
uvicorn.run(
|
||||
|
|
|
|||
|
|
@ -1,8 +0,0 @@
|
|||
from fastapi import APIRouter
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.get("/health")
|
||||
def get_health():
|
||||
return {"status": "OK"}
|
||||
12
src/backend/langflow/services/auth/factory.py
Normal file
12
src/backend/langflow/services/auth/factory.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from langflow.services.factory import ServiceFactory
|
||||
from langflow.services.auth.service import AuthManager
|
||||
|
||||
|
||||
class AuthManagerFactory(ServiceFactory):
    """Service-registry factory that builds the AuthManager service."""

    name = "auth_manager"

    def __init__(self):
        super().__init__(AuthManager)

    def create(self, settings_manager):
        # The auth service only needs the settings manager (secret key,
        # algorithm, superuser credentials live in auth settings).
        return AuthManager(settings_manager)
|
||||
12
src/backend/langflow/services/auth/service.py
Normal file
12
src/backend/langflow/services/auth/service.py
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
from langflow.services.base import Service
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.settings.manager import SettingsManager
|
||||
|
||||
|
||||
class AuthManager(Service):
    """Thin service wrapper exposing auth to the service registry.

    Currently only holds the settings manager; the actual auth helpers
    live in langflow.services.auth.utils.
    """

    name = "auth_manager"

    def __init__(self, settings_manager: "SettingsManager"):
        self.settings_manager = settings_manager
|
||||
283
src/backend/langflow/services/auth/utils.py
Normal file
283
src/backend/langflow/services/auth/utils.py
Normal file
|
|
@ -0,0 +1,283 @@
|
|||
from datetime import datetime, timedelta, timezone
|
||||
from fastapi import Depends, HTTPException, Security, status
|
||||
from fastapi.security import APIKeyHeader, APIKeyQuery, OAuth2PasswordBearer
|
||||
from jose import JWTError, jwt
|
||||
from typing import Annotated, Coroutine, Optional, Union
|
||||
from uuid import UUID
|
||||
from langflow.services.database.models.api_key.api_key import ApiKey
|
||||
from langflow.services.database.models.api_key.crud import check_key
|
||||
from langflow.services.database.models.user.user import User
|
||||
from langflow.services.database.models.user.crud import (
|
||||
get_user_by_id,
|
||||
get_user_by_username,
|
||||
update_user_last_login_at,
|
||||
)
|
||||
from langflow.services.utils import get_session, get_settings_manager
|
||||
from sqlmodel import Session
|
||||
|
||||
oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login")
|
||||
|
||||
API_KEY_NAME = "api-key"
|
||||
|
||||
api_key_query = APIKeyQuery(
|
||||
name=API_KEY_NAME, scheme_name="API key query", auto_error=False
|
||||
)
|
||||
api_key_header = APIKeyHeader(
|
||||
name=API_KEY_NAME, scheme_name="API key header", auto_error=False
|
||||
)
|
||||
|
||||
|
||||
# Source: https://github.com/mrtolkien/fastapi_simple_security/blob/master/fastapi_simple_security/security_api_key.py
|
||||
async def api_key_security(
    query_param: str = Security(api_key_query),
    header_param: str = Security(api_key_header),
    db: Session = Depends(get_session),
) -> Optional[User]:
    """Authenticate a request via API key (query parameter or header).

    When AUTO_LOGIN is enabled, the key check is skipped and the first
    superuser is returned. Otherwise the key may arrive either as the
    "api-key" query parameter or header; a missing or invalid key raises
    HTTP 403.
    """
    settings_manager = get_settings_manager()
    result: Optional[Union[ApiKey, User]] = None
    if settings_manager.auth_settings.AUTO_LOGIN:
        # Development convenience: act as the first superuser, no key needed.
        # (Removed a dead bare-expression statement that accessed
        # FIRST_SUPERUSER without using it.)
        result = get_user_by_username(
            db, settings_manager.auth_settings.FIRST_SUPERUSER
        )

    elif not query_param and not header_param:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="An API key must be passed as query or header",
        )

    elif query_param:
        result = check_key(db, query_param)

    else:
        result = check_key(db, header_param)

    if not result:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Invalid or missing API key",
        )
    # check_key may resolve to the ApiKey row itself; map it to its owner.
    if isinstance(result, ApiKey):
        return result.user
    elif isinstance(result, User):
        return result
|
||||
|
||||
|
||||
async def get_current_user(
    token: Annotated[str, Depends(oauth2_login)],
    db: Session = Depends(get_session),
) -> User:
    """Resolve the active user from a JWT bearer access token.

    Raises HTTP 401 for malformed or expired tokens, refresh tokens used
    as access tokens, unknown users, or inactive accounts.
    """
    settings_manager = get_settings_manager()

    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )

    # NOTE(review): some call path apparently hands us an un-awaited
    # coroutine instead of the token string — confirm this guard is
    # still needed.
    if isinstance(token, Coroutine):
        token = await token

    try:
        payload = jwt.decode(
            token,
            settings_manager.auth_settings.SECRET_KEY,
            algorithms=[settings_manager.auth_settings.ALGORITHM],
        )
        user_id: UUID = payload.get("sub")  # type: ignore
        token_type: str = payload.get("type")  # type: ignore

        # A truthy "type" claim marks a refresh token; reject it here.
        if user_id is None or token_type:
            raise credentials_exception
    except JWTError as e:
        raise credentials_exception from e

    user = get_user_by_id(db, user_id)  # type: ignore
    if user is None or not user.is_active:
        raise credentials_exception
    return user
|
||||
|
||||
|
||||
def get_current_active_user(current_user: Annotated[User, Depends(get_current_user)]):
    """Return the authenticated user, rejecting inactive accounts."""
    if current_user.is_active:
        return current_user
    raise HTTPException(status_code=400, detail="Inactive user")
|
||||
|
||||
|
||||
def get_current_active_superuser(
    current_user: Annotated[User, Depends(get_current_user)]
) -> User:
    """Return the authenticated user only if they are an active superuser."""
    # NOTE(review): inactive here yields 401 while get_current_active_user
    # uses 400, and "not enough privileges" would conventionally be 403 —
    # confirm these codes are intentional before clients depend on them.
    if not current_user.is_active:
        raise HTTPException(status_code=401, detail="Inactive user")
    if not current_user.is_superuser:
        raise HTTPException(
            status_code=400, detail="The user doesn't have enough privileges"
        )
    return current_user
|
||||
|
||||
|
||||
def verify_password(plain_password, hashed_password):
    """Check *plain_password* against *hashed_password* with the configured context."""
    pwd_context = get_settings_manager().auth_settings.pwd_context
    return pwd_context.verify(plain_password, hashed_password)
|
||||
|
||||
|
||||
def get_password_hash(password):
    """Hash *password* with the configured password context (bcrypt)."""
    return get_settings_manager().auth_settings.pwd_context.hash(password)
|
||||
|
||||
|
||||
def create_token(data: dict, expires_delta: timedelta):
    """Sign *data* as a JWT that expires *expires_delta* from now (UTC).

    *data* is not mutated; the ``exp`` claim is added to a copy.
    """
    auth = get_settings_manager().auth_settings

    claims = {**data, "exp": datetime.now(timezone.utc) + expires_delta}

    return jwt.encode(claims, auth.SECRET_KEY, algorithm=auth.ALGORITHM)
|
||||
|
||||
|
||||
def create_super_user(
    db: Session = Depends(get_session),
    username: Optional[str] = None,
    password: Optional[str] = None,
) -> User:
    """Return the superuser account, creating it on first call.

    Falls back to ``FIRST_SUPERUSER`` / ``FIRST_SUPERUSER_PASSWORD`` from
    the auth settings when *username* / *password* are not supplied.
    """
    auth = get_settings_manager().auth_settings
    name = username or auth.FIRST_SUPERUSER

    super_user = get_user_by_username(db, name)

    if super_user is None:
        # First run: persist an active superuser with a hashed password.
        super_user = User(
            username=name,
            password=get_password_hash(password or auth.FIRST_SUPERUSER_PASSWORD),
            is_superuser=True,
            is_active=True,
            last_login_at=None,
        )
        db.add(super_user)
        db.commit()
        db.refresh(super_user)

    return super_user
|
||||
|
||||
|
||||
def create_user_longterm_token(db: Session = Depends(get_session)) -> dict:
    """Issue a one-year access token for the (auto-created) superuser."""
    super_user = create_super_user(db)

    token = create_token(
        data={"sub": str(super_user.id)},
        expires_delta=timedelta(days=365),
    )

    # Record the token issuance as a login.
    update_user_last_login_at(super_user.id, db)

    return {
        "access_token": token,
        "refresh_token": None,
        "token_type": "bearer",
    }
|
||||
|
||||
|
||||
def create_user_api_key(user_id: UUID) -> dict:
    """Issue a two-year ``role: api_key`` token bound to *user_id*."""
    api_key = create_token(
        data={"sub": str(user_id), "role": "api_key"},
        expires_delta=timedelta(days=365 * 2),
    )
    return {"api_key": api_key}
|
||||
|
||||
|
||||
def get_user_id_from_token(token: str) -> UUID:
    """Best-effort extraction of the ``sub`` claim as a UUID.

    The token's signature is NOT verified here; on any failure the nil
    UUID is returned instead of raising.
    """
    try:
        return UUID(jwt.get_unverified_claims(token)["sub"])
    except (KeyError, JWTError, ValueError):
        return UUID(int=0)
|
||||
|
||||
|
||||
def create_user_tokens(
    user_id: UUID, db: Session = Depends(get_session), update_last_login: bool = False
) -> dict:
    """Issue a fresh access/refresh token pair for *user_id*.

    The refresh token is tagged with ``"type": "rf"`` so it cannot be
    used as an access token (see ``get_current_user``). Optionally
    stamps the user's ``last_login_at``.
    """
    auth = get_settings_manager().auth_settings

    access_token = create_token(
        data={"sub": str(user_id)},
        expires_delta=timedelta(minutes=auth.ACCESS_TOKEN_EXPIRE_MINUTES),
    )
    refresh_token = create_token(
        data={"sub": str(user_id), "type": "rf"},
        expires_delta=timedelta(minutes=auth.REFRESH_TOKEN_EXPIRE_MINUTES),
    )

    if update_last_login:
        update_user_last_login_at(user_id, db)

    return {
        "access_token": access_token,
        "refresh_token": refresh_token,
        "token_type": "bearer",
    }
|
||||
|
||||
|
||||
def create_refresh_token(refresh_token: str, db: Session = Depends(get_session)):
    """Exchange a valid refresh token for a new access/refresh token pair.

    Raises:
        HTTPException: 401 when the token is malformed, has a bad
            signature, or is not a refresh token.
    """
    settings_manager = get_settings_manager()

    invalid_token = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid refresh token"
    )

    try:
        payload = jwt.decode(
            refresh_token,
            settings_manager.auth_settings.SECRET_KEY,
            algorithms=[settings_manager.auth_settings.ALGORITHM],
        )
    except JWTError as e:
        raise invalid_token from e

    user_id: UUID = payload.get("sub")  # type: ignore
    token_type: str = payload.get("type")  # type: ignore

    # FIX: require the explicit "rf" marker set by create_user_tokens,
    # not merely *any* "type" claim — otherwise any signed token that
    # happened to carry a "type" field could mint fresh token pairs.
    if user_id is None or token_type != "rf":
        raise invalid_token

    # Issued outside the try block so a failure here is not mislabeled
    # as a JWT problem.
    return create_user_tokens(user_id, db)
|
||||
|
||||
|
||||
def authenticate_user(
    username: str, password: str, db: Session = Depends(get_session)
) -> Optional[User]:
    """Validate a username/password pair.

    Returns the user on success, ``None`` for an unknown user or a wrong
    password, and raises HTTP 400 for users that exist but are not
    active (distinguishing never-approved accounts from deactivated ones).
    """
    user = get_user_by_username(db, username)

    if user is None:
        return None

    if not user.is_active:
        # A user that never logged in is still awaiting approval.
        detail = "Waiting for approval" if not user.last_login_at else "Inactive user"
        raise HTTPException(status_code=400, detail=detail)

    if verify_password(password, user.password):
        return user
    return None
|
||||
|
|
@ -6,6 +6,6 @@ class CacheManagerFactory(ServiceFactory):
|
|||
def __init__(self):
|
||||
super().__init__(CacheManager)
|
||||
|
||||
def create(self, settings_service):
|
||||
def create(self):
|
||||
# Here you would have logic to create and configure a CacheManager
|
||||
return CacheManager()
|
||||
|
|
|
|||
|
|
@ -6,6 +6,6 @@ class ChatManagerFactory(ServiceFactory):
|
|||
def __init__(self):
|
||||
super().__init__(ChatManager)
|
||||
|
||||
def create(self, settings_service):
|
||||
def create(self):
|
||||
# Here you would have logic to create and configure a ChatManager
|
||||
return ChatManager()
|
||||
|
|
|
|||
|
|
@ -191,7 +191,7 @@ class ChatManager(Service):
|
|||
json_payload = await websocket.receive_json()
|
||||
try:
|
||||
payload = orjson.loads(json_payload)
|
||||
except TypeError:
|
||||
except Exception:
|
||||
payload = json_payload
|
||||
if "clear_history" in payload:
|
||||
self.chat_history.history[client_id] = []
|
||||
|
|
|
|||
|
|
@ -10,8 +10,8 @@ class DatabaseManagerFactory(ServiceFactory):
|
|||
def __init__(self):
|
||||
super().__init__(DatabaseManager)
|
||||
|
||||
def create(self, settings_service: "SettingsManager"):
|
||||
def create(self, settings_manager: "SettingsManager"):
|
||||
# Here you would have logic to create and configure a DatabaseManager
|
||||
if not settings_service.settings.DATABASE_URL:
|
||||
if not settings_manager.settings.DATABASE_URL:
|
||||
raise ValueError("No database URL provided")
|
||||
return DatabaseManager(settings_service.settings.DATABASE_URL)
|
||||
return DatabaseManager(settings_manager.settings.DATABASE_URL)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,10 @@
|
|||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from langflow.services.base import Service
|
||||
from langflow.services.database.utils import Result, TableResults
|
||||
from langflow.services.utils import get_settings_manager
|
||||
from sqlalchemy import inspect
|
||||
import sqlalchemy as sa
|
||||
from sqlmodel import SQLModel, Session, create_engine
|
||||
from langflow.utils.logger import logger
|
||||
from alembic.config import Config
|
||||
|
|
@ -54,6 +57,41 @@ class DatabaseManager(Service):
|
|||
with Session(self.engine) as session:
|
||||
yield session
|
||||
|
||||
def check_schema_health(self) -> bool:
|
||||
inspector = inspect(self.engine)
|
||||
|
||||
model_mapping = {
|
||||
"flow": models.Flow,
|
||||
"user": models.User,
|
||||
"apikey": models.ApiKey,
|
||||
# Add other SQLModel classes here
|
||||
}
|
||||
|
||||
# To account for tables that existed in older versions
|
||||
legacy_tables = ["flowstyle"]
|
||||
|
||||
for table, model in model_mapping.items():
|
||||
expected_columns = list(model.__fields__.keys())
|
||||
|
||||
try:
|
||||
available_columns = [
|
||||
col["name"] for col in inspector.get_columns(table)
|
||||
]
|
||||
except sa.exc.NoSuchTableError:
|
||||
logger.error(f"Missing table: {table}")
|
||||
return False
|
||||
|
||||
for column in expected_columns:
|
||||
if column not in available_columns:
|
||||
logger.error(f"Missing column: {column} in table {table}")
|
||||
return False
|
||||
|
||||
for table in legacy_tables:
|
||||
if table in inspector.get_table_names():
|
||||
logger.warn(f"Legacy table exists: {table}")
|
||||
|
||||
return True
|
||||
|
||||
def run_migrations(self):
|
||||
logger.info(
|
||||
f"Running DB migrations in {self.script_location} on {self.database_url}"
|
||||
|
|
@ -63,6 +101,40 @@ class DatabaseManager(Service):
|
|||
alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
|
||||
command.upgrade(alembic_cfg, "head")
|
||||
|
||||
def run_migrations_test(self):
|
||||
# This method is used for testing purposes only
|
||||
# We will check that all models are in the database
|
||||
# and that the database is up to date with all columns
|
||||
sql_models = [models.Flow, models.User, models.ApiKey]
|
||||
results = []
|
||||
for sql_model in sql_models:
|
||||
results.append(
|
||||
TableResults(sql_model.__tablename__, self.check_table(sql_model))
|
||||
)
|
||||
return results
|
||||
|
||||
def check_table(self, model):
|
||||
results = []
|
||||
inspector = inspect(self.engine)
|
||||
table_name = model.__tablename__
|
||||
expected_columns = list(model.__fields__.keys())
|
||||
try:
|
||||
available_columns = [
|
||||
col["name"] for col in inspector.get_columns(table_name)
|
||||
]
|
||||
results.append(Result(name=table_name, type="table", success=True))
|
||||
except sa.exc.NoSuchTableError:
|
||||
logger.error(f"Missing table: {table_name}")
|
||||
results.append(Result(name=table_name, type="table", success=False))
|
||||
|
||||
for column in expected_columns:
|
||||
if column not in available_columns:
|
||||
logger.error(f"Missing column: {column} in table {table_name}")
|
||||
results.append(Result(name=column, type="column", success=False))
|
||||
else:
|
||||
results.append(Result(name=column, type="column", success=True))
|
||||
return results
|
||||
|
||||
def create_db_and_tables(self):
|
||||
logger.debug("Creating database and tables")
|
||||
try:
|
||||
|
|
@ -76,9 +148,14 @@ class DatabaseManager(Service):
|
|||
from sqlalchemy import inspect
|
||||
|
||||
inspector = inspect(self.engine)
|
||||
if "flow" not in inspector.get_table_names():
|
||||
logger.error("Something went wrong creating the database and tables.")
|
||||
logger.error("Please check your database settings.")
|
||||
raise RuntimeError("Something went wrong creating the database and tables.")
|
||||
else:
|
||||
logger.debug("Database and tables created successfully")
|
||||
current_tables = ["flow", "user", "apikey"]
|
||||
table_names = inspector.get_table_names()
|
||||
for table in current_tables:
|
||||
if table not in table_names:
|
||||
logger.error("Something went wrong creating the database and tables.")
|
||||
logger.error("Please check your database settings.")
|
||||
raise RuntimeError(
|
||||
"Something went wrong creating the database and tables."
|
||||
)
|
||||
|
||||
logger.debug("Database and tables created successfully")
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
from .flow import Flow
|
||||
from .user import User
|
||||
from .api_key import ApiKey
|
||||
|
||||
|
||||
__all__ = ["Flow"]
|
||||
__all__ = ["Flow", "User", "ApiKey"]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,3 @@
|
|||
from .api_key import ApiKey, ApiKeyCreate, UnmaskedApiKeyRead, ApiKeyRead
|
||||
|
||||
__all__ = ["ApiKey", "ApiKeyCreate", "UnmaskedApiKeyRead", "ApiKeyRead"]
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
from pydantic import validator
|
||||
from sqlmodel import Field, Relationship
|
||||
from uuid import UUID, uuid4
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
from datetime import datetime
|
||||
from langflow.services.database.models.base import SQLModelSerializable
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.user import User
|
||||
|
||||
|
||||
class ApiKeyBase(SQLModelSerializable):
|
||||
name: Optional[str] = Field(index=True)
|
||||
created_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
last_used_at: Optional[datetime] = Field(default=None)
|
||||
total_uses: int = Field(default=0)
|
||||
is_active: bool = Field(default=True)
|
||||
|
||||
|
||||
class ApiKey(ApiKeyBase, table=True):
|
||||
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
|
||||
|
||||
api_key: str = Field(index=True, unique=True)
|
||||
# User relationship
|
||||
user_id: UUID = Field(index=True, foreign_key="user.id")
|
||||
user: "User" = Relationship(back_populates="api_keys")
|
||||
|
||||
|
||||
class ApiKeyCreate(ApiKeyBase):
|
||||
api_key: Optional[str] = None
|
||||
user_id: Optional[UUID] = None
|
||||
|
||||
|
||||
class UnmaskedApiKeyRead(ApiKeyBase):
|
||||
id: UUID
|
||||
api_key: str = Field()
|
||||
user_id: UUID = Field()
|
||||
|
||||
|
||||
class ApiKeyRead(ApiKeyBase):
|
||||
id: UUID
|
||||
api_key: str = Field()
|
||||
user_id: UUID = Field()
|
||||
|
||||
@validator("api_key", always=True)
|
||||
def mask_api_key(cls, v):
|
||||
# This validator will always run, and will mask the API key
|
||||
return f"{v[:8]}{'*' * (len(v) - 8)}"
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
import datetime
|
||||
import secrets
|
||||
import threading
|
||||
from uuid import UUID
|
||||
from typing import List, Optional
|
||||
from sqlmodel import Session, select
|
||||
from langflow.services.database.models.api_key import (
|
||||
ApiKey,
|
||||
ApiKeyCreate,
|
||||
UnmaskedApiKeyRead,
|
||||
ApiKeyRead,
|
||||
)
|
||||
|
||||
|
||||
def get_api_keys(session: Session, user_id: UUID) -> List[ApiKeyRead]:
|
||||
query = select(ApiKey).where(ApiKey.user_id == user_id)
|
||||
api_keys = session.exec(query).all()
|
||||
return [ApiKeyRead.from_orm(api_key) for api_key in api_keys]
|
||||
|
||||
|
||||
def create_api_key(
|
||||
session: Session, api_key_create: ApiKeyCreate, user_id: UUID
|
||||
) -> UnmaskedApiKeyRead:
|
||||
# Generate a random API key with 32 bytes of randomness
|
||||
generated_api_key = f"lf-{secrets.token_urlsafe(32)}"
|
||||
|
||||
api_key = ApiKey(
|
||||
api_key=generated_api_key,
|
||||
name=api_key_create.name,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
session.add(api_key)
|
||||
session.commit()
|
||||
session.refresh(api_key)
|
||||
unmasked = UnmaskedApiKeyRead.from_orm(api_key)
|
||||
unmasked.api_key = generated_api_key
|
||||
return unmasked
|
||||
|
||||
|
||||
def delete_api_key(session: Session, api_key_id: UUID) -> None:
|
||||
api_key = session.get(ApiKey, api_key_id)
|
||||
if api_key is None:
|
||||
raise ValueError("API Key not found")
|
||||
session.delete(api_key)
|
||||
session.commit()
|
||||
|
||||
|
||||
def check_key(session: Session, api_key: str) -> Optional[ApiKey]:
|
||||
"""Check if the API key is valid."""
|
||||
query = select(ApiKey).where(ApiKey.api_key == api_key)
|
||||
api_key_object: Optional[ApiKey] = session.exec(query).first()
|
||||
if api_key_object is not None:
|
||||
threading.Thread(
|
||||
target=update_total_uses,
|
||||
args=(
|
||||
session,
|
||||
api_key_object,
|
||||
),
|
||||
).start()
|
||||
return api_key_object
|
||||
|
||||
|
||||
def update_total_uses(session, api_key: ApiKey):
|
||||
"""Update the total uses and last used at."""
|
||||
api_key.total_uses += 1
|
||||
api_key.last_used_at = datetime.datetime.now(datetime.timezone.utc)
|
||||
session.add(api_key)
|
||||
session.commit()
|
||||
session.refresh(api_key)
|
||||
return api_key
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
from .component import Component, ComponentModel
|
||||
|
||||
__all__ = ["Component", "ComponentModel"]
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
from .flow import Flow, FlowCreate, FlowRead, FlowUpdate
|
||||
|
||||
__all__ = ["Flow", "FlowCreate", "FlowRead", "FlowUpdate"]
|
||||
|
|
@ -2,11 +2,12 @@
|
|||
|
||||
from langflow.services.database.models.base import SQLModelSerializable
|
||||
from pydantic import validator
|
||||
from sqlmodel import Field, JSON, Column
|
||||
from sqlmodel import Field, JSON, Column, Relationship
|
||||
from uuid import UUID, uuid4
|
||||
from typing import Dict, Optional
|
||||
from typing import Dict, Optional, TYPE_CHECKING
|
||||
|
||||
# if TYPE_CHECKING:
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.user import User
|
||||
|
||||
|
||||
class FlowBase(SQLModelSerializable):
|
||||
|
|
@ -16,7 +17,6 @@ class FlowBase(SQLModelSerializable):
|
|||
|
||||
@validator("data")
|
||||
def validate_json(v):
|
||||
# dict_keys(['description', 'name', 'id', 'data'])
|
||||
if not v:
|
||||
return v
|
||||
if not isinstance(v, dict):
|
||||
|
|
@ -34,14 +34,17 @@ class FlowBase(SQLModelSerializable):
|
|||
class Flow(FlowBase, table=True):
|
||||
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
|
||||
data: Optional[Dict] = Field(default=None, sa_column=Column(JSON))
|
||||
user_id: UUID = Field(index=True, foreign_key="user.id")
|
||||
user: "User" = Relationship(back_populates="flows")
|
||||
|
||||
|
||||
class FlowCreate(FlowBase):
|
||||
pass
|
||||
user_id: Optional[UUID] = None
|
||||
|
||||
|
||||
class FlowRead(FlowBase):
|
||||
id: UUID
|
||||
user_id: UUID = Field()
|
||||
|
||||
|
||||
class FlowUpdate(SQLModelSerializable):
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
from .user import User, UserCreate, UserRead, UserUpdate
|
||||
|
||||
__all__ = [
|
||||
"User",
|
||||
"UserCreate",
|
||||
"UserRead",
|
||||
"UserUpdate",
|
||||
]
|
||||
53
src/backend/langflow/services/database/models/user/crud.py
Normal file
53
src/backend/langflow/services/database/models/user/crud.py
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
from datetime import datetime, timezone
|
||||
from typing import Union
|
||||
from uuid import UUID
|
||||
from fastapi import Depends, HTTPException
|
||||
from langflow.services.database.models.user.user import User, UserUpdate
|
||||
from langflow.services.utils import get_session
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlmodel import Session
|
||||
|
||||
|
||||
from sqlalchemy.orm.attributes import flag_modified
|
||||
|
||||
|
||||
def get_user_by_username(db: Session, username: str) -> Union[User, None]:
|
||||
return db.query(User).filter(User.username == username).first()
|
||||
|
||||
|
||||
def get_user_by_id(db: Session, id: UUID) -> Union[User, None]:
|
||||
return db.query(User).filter(User.id == id).first()
|
||||
|
||||
|
||||
def update_user(
|
||||
user_id: UUID, user: UserUpdate, db: Session = Depends(get_session)
|
||||
) -> User:
|
||||
user_db = get_user_by_id(db, user_id)
|
||||
if not user_db:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
user_db_by_username = get_user_by_username(db, user.username) # type: ignore
|
||||
if user_db_by_username and user_db_by_username.id != user_id:
|
||||
raise HTTPException(status_code=409, detail="Username already exists")
|
||||
|
||||
user_data = user.dict(exclude_unset=True)
|
||||
for attr, value in user_data.items():
|
||||
if hasattr(user_db, attr) and value is not None:
|
||||
setattr(user_db, attr, value)
|
||||
|
||||
user_db.updated_at = datetime.now(timezone.utc)
|
||||
flag_modified(user_db, "updated_at")
|
||||
|
||||
try:
|
||||
db.commit()
|
||||
except IntegrityError as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
||||
return user_db
|
||||
|
||||
|
||||
def update_user_last_login_at(user_id: UUID, db: Session = Depends(get_session)):
|
||||
user_data = UserUpdate(last_login_at=datetime.now(timezone.utc)) # type: ignore
|
||||
|
||||
return update_user(user_id, user_data, db)
|
||||
46
src/backend/langflow/services/database/models/user/user.py
Normal file
46
src/backend/langflow/services/database/models/user/user.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
from langflow.services.database.models.base import SQLModel, SQLModelSerializable
|
||||
from sqlmodel import Field, Relationship
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Optional, TYPE_CHECKING
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.models.api_key import ApiKey
|
||||
from langflow.services.database.models.flow import Flow
|
||||
|
||||
|
||||
class User(SQLModelSerializable, table=True):
|
||||
id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
|
||||
username: str = Field(index=True, unique=True)
|
||||
password: str = Field()
|
||||
is_active: bool = Field(default=False)
|
||||
is_superuser: bool = Field(default=False)
|
||||
create_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
updated_at: datetime = Field(default_factory=datetime.utcnow)
|
||||
last_login_at: Optional[datetime] = Field()
|
||||
api_keys: list["ApiKey"] = Relationship(back_populates="user")
|
||||
flows: list["Flow"] = Relationship(back_populates="user")
|
||||
|
||||
|
||||
class UserCreate(SQLModel):
|
||||
username: str = Field()
|
||||
password: str = Field()
|
||||
|
||||
|
||||
class UserRead(SQLModel):
|
||||
id: UUID = Field(default_factory=uuid4)
|
||||
username: str = Field()
|
||||
is_active: bool = Field()
|
||||
is_superuser: bool = Field()
|
||||
create_at: datetime = Field()
|
||||
updated_at: datetime = Field()
|
||||
last_login_at: Optional[datetime] = Field()
|
||||
|
||||
|
||||
class UserUpdate(SQLModel):
|
||||
username: Optional[str] = Field()
|
||||
is_active: Optional[bool] = Field()
|
||||
is_superuser: Optional[bool] = Field()
|
||||
last_login_at: Optional[datetime] = Field()
|
||||
|
|
@ -1,3 +1,4 @@
|
|||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
from langflow.utils.logger import logger
|
||||
from contextlib import contextmanager
|
||||
|
|
@ -13,6 +14,11 @@ def initialize_database():
|
|||
from langflow.services import service_manager, ServiceType
|
||||
|
||||
database_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
|
||||
try:
|
||||
database_manager.check_schema_health()
|
||||
except Exception as exc:
|
||||
logger.error(f"Error checking schema health: {exc}")
|
||||
raise RuntimeError("Error checking schema health") from exc
|
||||
try:
|
||||
database_manager.run_migrations()
|
||||
except CommandError as exc:
|
||||
|
|
@ -28,8 +34,11 @@ def initialize_database():
|
|||
session.execute("DROP TABLE alembic_version")
|
||||
database_manager.run_migrations()
|
||||
except Exception as exc:
|
||||
logger.error(f"Error running migrations: {exc}")
|
||||
raise RuntimeError("Error running migrations") from exc
|
||||
# if the exception involves tables already existing
|
||||
# we can ignore it
|
||||
if "already exists" not in str(exc):
|
||||
logger.error(f"Error running migrations: {exc}")
|
||||
raise RuntimeError("Error running migrations") from exc
|
||||
database_manager.create_db_and_tables()
|
||||
logger.debug("Database initialized")
|
||||
|
||||
|
|
@ -45,3 +54,16 @@ def session_getter(db_manager: "DatabaseManager"):
|
|||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@dataclass
|
||||
class Result:
|
||||
name: str
|
||||
type: str
|
||||
success: bool
|
||||
|
||||
|
||||
@dataclass
|
||||
class TableResults:
|
||||
table_name: str
|
||||
results: list[Result]
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
from langflow.services.schema import ServiceType
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TYPE_CHECKING, List, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.factory import ServiceFactory
|
||||
|
|
@ -13,13 +13,21 @@ class ServiceManager:
|
|||
def __init__(self):
|
||||
self.services = {}
|
||||
self.factories = {}
|
||||
self.dependencies = {}
|
||||
|
||||
def register_factory(self, service_factory: "ServiceFactory"):
|
||||
def register_factory(
|
||||
self,
|
||||
service_factory: "ServiceFactory",
|
||||
dependencies: Optional[List[ServiceType]] = None,
|
||||
):
|
||||
"""
|
||||
Registers a new factory.
|
||||
Registers a new factory with dependencies.
|
||||
"""
|
||||
if service_factory.service_class.name not in self.factories:
|
||||
self.factories[service_factory.service_class.name] = service_factory
|
||||
if dependencies is None:
|
||||
dependencies = []
|
||||
service_name = service_factory.service_class.name
|
||||
self.factories[service_name] = service_factory
|
||||
self.dependencies[service_name] = dependencies
|
||||
|
||||
def get(self, service_name: ServiceType):
|
||||
"""
|
||||
|
|
@ -32,17 +40,25 @@ class ServiceManager:
|
|||
|
||||
def _create_service(self, service_name: ServiceType):
|
||||
"""
|
||||
Create a new service given its name.
|
||||
Create a new service given its name, handling dependencies.
|
||||
"""
|
||||
self._validate_service_creation(service_name)
|
||||
|
||||
if service_name == ServiceType.SETTINGS_MANAGER:
|
||||
self.services[service_name] = self.factories[service_name].create()
|
||||
else:
|
||||
settings_service = self.get(ServiceType.SETTINGS_MANAGER)
|
||||
self.services[service_name] = self.factories[service_name].create(
|
||||
settings_service
|
||||
)
|
||||
# Create dependencies first
|
||||
for dependency in self.dependencies.get(service_name, []):
|
||||
if dependency not in self.services:
|
||||
self._create_service(dependency)
|
||||
|
||||
# Collect the dependent services
|
||||
dependent_services = {
|
||||
dep.value: self.services[dep]
|
||||
for dep in self.dependencies.get(service_name, [])
|
||||
}
|
||||
|
||||
# Create the actual service
|
||||
self.services[service_name] = self.factories[service_name].create(
|
||||
**dependent_services
|
||||
)
|
||||
|
||||
def _validate_service_creation(self, service_name: ServiceType):
|
||||
"""
|
||||
|
|
@ -53,14 +69,6 @@ class ServiceManager:
|
|||
f"No factory registered for the service class '{service_name.name}'"
|
||||
)
|
||||
|
||||
if (
|
||||
ServiceType.SETTINGS_MANAGER not in self.factories
|
||||
and service_name != ServiceType.SETTINGS_MANAGER
|
||||
):
|
||||
raise ValueError(
|
||||
f"Cannot create service '{service_name.name}' before the settings service"
|
||||
)
|
||||
|
||||
def update(self, service_name: ServiceType):
|
||||
"""
|
||||
Update a service by its name.
|
||||
|
|
@ -81,12 +89,24 @@ def initialize_services():
|
|||
from langflow.services.cache import factory as cache_factory
|
||||
from langflow.services.chat import factory as chat_factory
|
||||
from langflow.services.settings import factory as settings_factory
|
||||
from langflow.services.auth import factory as auth_factory
|
||||
|
||||
service_manager.register_factory(settings_factory.SettingsManagerFactory())
|
||||
service_manager.register_factory(database_factory.DatabaseManagerFactory())
|
||||
service_manager.register_factory(
|
||||
auth_factory.AuthManagerFactory(), dependencies=[ServiceType.SETTINGS_MANAGER]
|
||||
)
|
||||
service_manager.register_factory(
|
||||
database_factory.DatabaseManagerFactory(),
|
||||
dependencies=[ServiceType.SETTINGS_MANAGER],
|
||||
)
|
||||
service_manager.register_factory(cache_factory.CacheManagerFactory())
|
||||
service_manager.register_factory(chat_factory.ChatManagerFactory())
|
||||
|
||||
# Test cache connection
|
||||
service_manager.get(ServiceType.CACHE_MANAGER)
|
||||
# Test database connection
|
||||
service_manager.get(ServiceType.DATABASE_MANAGER)
|
||||
|
||||
|
||||
def initialize_settings_manager():
|
||||
"""
|
||||
|
|
@ -95,3 +115,22 @@ def initialize_settings_manager():
|
|||
from langflow.services.settings import factory as settings_factory
|
||||
|
||||
service_manager.register_factory(settings_factory.SettingsManagerFactory())
|
||||
|
||||
|
||||
def initialize_session_manager():
|
||||
"""
|
||||
Initialize the session manager.
|
||||
"""
|
||||
from langflow.services.session import factory as session_manager_factory
|
||||
from langflow.services.cache import factory as cache_factory
|
||||
|
||||
initialize_settings_manager()
|
||||
|
||||
service_manager.register_factory(
|
||||
cache_factory.CacheManagerFactory(), dependencies=[ServiceType.SETTINGS_MANAGER]
|
||||
)
|
||||
|
||||
service_manager.register_factory(
|
||||
session_manager_factory.SessionManagerFactory(),
|
||||
dependencies=[ServiceType.CACHE_MANAGER],
|
||||
)
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ class ServiceType(str, Enum):
|
|||
registered with the service manager.
|
||||
"""
|
||||
|
||||
AUTH_MANAGER = "auth_manager"
|
||||
CACHE_MANAGER = "cache_manager"
|
||||
SETTINGS_MANAGER = "settings_manager"
|
||||
DATABASE_MANAGER = "database_manager"
|
||||
|
|
|
|||
33
src/backend/langflow/services/settings/auth.py
Normal file
33
src/backend/langflow/services/settings/auth.py
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
from typing import Optional
|
||||
import secrets
|
||||
|
||||
from pydantic import BaseSettings
|
||||
from passlib.context import CryptContext
|
||||
|
||||
|
||||
class AuthSettings(BaseSettings):
    """Authentication settings, overridable via ``LANGFLOW_*`` env vars."""

    # Login settings. SECRET_KEY defaults to a fresh random value per
    # process, so issued tokens do not survive a restart unless the key
    # is pinned via LANGFLOW_SECRET_KEY.
    SECRET_KEY: str = secrets.token_hex(32)
    ALGORITHM: str = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
    REFRESH_TOKEN_EXPIRE_MINUTES: int = 70

    # API Key to execute /process endpoint
    # SECURITY NOTE(review): this default secret is hard-coded in source
    # and therefore shared by every deployment that does not override it.
    # It must be overridden (LANGFLOW_API_KEY_SECRET_KEY) in production.
    API_KEY_SECRET_KEY: Optional[
        str
    ] = "b82818e0ad4ff76615c5721ee21004b07d84cd9b87ba4d9cb42374da134b841a"
    API_KEY_ALGORITHM: str = "HS256"
    API_V1_STR: str = "/api/v1"

    # If AUTO_LOGIN = True
    # > The application does not request login and logs in automatically as a super user.
    AUTO_LOGIN: bool = False
    FIRST_SUPERUSER: str = "langflow"
    FIRST_SUPERUSER_PASSWORD: str = "langflow"

    # Shared passlib context used for hashing/verifying passwords.
    pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

    class Config:
        validate_assignment = True
        extra = "ignore"
        env_prefix = "LANGFLOW_"
|
||||
|
|
@ -3,7 +3,6 @@ import json
|
|||
import orjson
|
||||
import os
|
||||
from shutil import copy2
|
||||
import secrets
|
||||
from typing import Optional, List
|
||||
from pathlib import Path
|
||||
|
||||
|
|
@ -42,15 +41,6 @@ class Settings(BaseSettings):
|
|||
REMOVE_API_KEYS: bool = False
|
||||
COMPONENTS_PATH: List[str] = []
|
||||
|
||||
# Login settings
|
||||
SECRET_KEY: str = secrets.token_hex(32)
|
||||
ALGORITHM: str = "HS256"
|
||||
ACCESS_TOKEN_EXPIRE_MINUTES: int = 60
|
||||
REFRESH_TOKEN_EXPIRE_MINUTES: int = 70
|
||||
# If AUTO_LOGIN = True
|
||||
# > The application does not request login and logs in automatically as a super user.
|
||||
AUTO_LOGIN: bool = True
|
||||
|
||||
@validator("CONFIG_DIR", pre=True, allow_reuse=True)
|
||||
def set_langflow_dir(cls, value):
|
||||
if not value:
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
from langflow.services.base import Service
|
||||
from langflow.services.settings.auth import AuthSettings
|
||||
from langflow.services.settings.base import Settings
|
||||
from langflow.utils.logger import logger
|
||||
import os
|
||||
|
|
@ -8,9 +9,10 @@ import yaml
|
|||
class SettingsManager(Service):
|
||||
name = "settings_manager"
|
||||
|
||||
def __init__(self, settings: Settings):
|
||||
def __init__(self, settings: Settings, auth_settings: AuthSettings):
|
||||
super().__init__()
|
||||
self.settings = settings
|
||||
self.auth_settings = auth_settings
|
||||
|
||||
@classmethod
|
||||
def load_settings_from_yaml(cls, file_path: str) -> "SettingsManager":
|
||||
|
|
@ -33,4 +35,5 @@ class SettingsManager(Service):
|
|||
)
|
||||
|
||||
settings = Settings(**settings_dict)
|
||||
return cls(settings)
|
||||
auth_settings = AuthSettings()
|
||||
return cls(settings, auth_settings)
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
from langflow.services import ServiceType, service_manager
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langflow.services.database.manager import DatabaseManager
|
||||
from langflow.services.settings.manager import SettingsManager
|
||||
|
||||
|
||||
|
|
@ -9,7 +11,7 @@ def get_settings_manager() -> "SettingsManager":
|
|||
return service_manager.get(ServiceType.SETTINGS_MANAGER)
|
||||
|
||||
|
||||
def get_db_manager():
|
||||
def get_db_manager() -> "DatabaseManager":
|
||||
return service_manager.get(ServiceType.DATABASE_MANAGER)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -4,12 +4,10 @@ import importlib
|
|||
from functools import wraps
|
||||
from typing import Optional, Dict, Any, Union
|
||||
|
||||
from docstring_parser import parse # type: ignore
|
||||
from docstring_parser import parse
|
||||
|
||||
from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS
|
||||
from langflow.utils import constants
|
||||
from langflow.utils.logger import logger
|
||||
from multiprocess import cpu_count # type: ignore
|
||||
|
||||
|
||||
def build_template_from_function(
|
||||
|
|
@ -265,6 +263,9 @@ def format_dict(
|
|||
|
||||
_type: Union[str, type] = get_type(value)
|
||||
|
||||
if "BaseModel" in str(_type):
|
||||
continue
|
||||
|
||||
_type = remove_optional_wrapper(_type)
|
||||
_type = check_list_type(_type, value)
|
||||
_type = replace_mapping_with_dict(_type)
|
||||
|
|
@ -455,10 +456,3 @@ def add_options_to_field(
|
|||
value["options"] = options_map[class_name]
|
||||
value["list"] = True
|
||||
value["value"] = options_map[class_name][0]
|
||||
|
||||
|
||||
def get_number_of_workers(workers=None):
|
||||
if workers == -1 or workers is None:
|
||||
workers = (cpu_count() * 2) + 1
|
||||
logger.debug(f"Number of workers: {workers}")
|
||||
return workers
|
||||
|
|
|
|||
758
src/frontend/package-lock.json
generated
758
src/frontend/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
|
@ -40,6 +40,7 @@
|
|||
"esbuild": "^0.17.18",
|
||||
"lodash": "^4.17.21",
|
||||
"lucide-react": "^0.233.0",
|
||||
"moment": "^2.29.4",
|
||||
"react": "^18.2.0",
|
||||
"react-ace": "^10.1.0",
|
||||
"react-cookie": "^4.1.1",
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import _ from "lodash";
|
||||
import { useContext, useEffect, useState } from "react";
|
||||
import { useLocation } from "react-router-dom";
|
||||
import { useLocation, useNavigate } from "react-router-dom";
|
||||
import "reactflow/dist/style.css";
|
||||
import "./App.css";
|
||||
|
||||
|
|
@ -42,8 +42,11 @@ export default function App() {
|
|||
successData,
|
||||
successOpen,
|
||||
setSuccessOpen,
|
||||
setErrorData,
|
||||
loading,
|
||||
setLoading,
|
||||
} = useContext(alertContext);
|
||||
const navigate = useNavigate();
|
||||
const { fetchError } = useContext(typesContext);
|
||||
|
||||
// Initialize state variable for the list of alerts
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import { useState } from "react";
|
||||
import { useEffect, useState } from "react";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
|
|
@ -12,17 +12,21 @@ import { Button } from "../ui/button";
|
|||
|
||||
export default function PaginatorComponent({
|
||||
pageSize = 10,
|
||||
pageIndex = 1,
|
||||
rowsCount = [10, 20, 30],
|
||||
pageIndex = 0,
|
||||
rowsCount = [10, 20, 50, 100],
|
||||
totalRowsCount = 0,
|
||||
paginate,
|
||||
}: PaginatorComponentType) {
|
||||
const [size, setPageSize] = useState(pageSize);
|
||||
const [index, setPageIndex] = useState(pageIndex);
|
||||
|
||||
const [maxIndex, setMaxPageIndex] = useState(
|
||||
Math.ceil(totalRowsCount / pageSize)
|
||||
);
|
||||
const [currentPage, setCurrentPage] = useState(1);
|
||||
|
||||
useEffect(() => {
|
||||
setMaxPageIndex(Math.ceil(totalRowsCount / size));
|
||||
}, [totalRowsCount]);
|
||||
|
||||
return (
|
||||
<>
|
||||
|
|
@ -35,7 +39,7 @@ export default function PaginatorComponent({
|
|||
onValueChange={(pageSize: string) => {
|
||||
setPageSize(Number(pageSize));
|
||||
setMaxPageIndex(Math.ceil(totalRowsCount / Number(pageSize)));
|
||||
paginate(Number(pageSize), index);
|
||||
paginate(Number(pageSize), 0);
|
||||
}}
|
||||
>
|
||||
<SelectTrigger className="w-[100px]">
|
||||
|
|
@ -51,30 +55,30 @@ export default function PaginatorComponent({
|
|||
</Select>
|
||||
</div>
|
||||
<div className="flex w-[100px] items-center justify-center text-sm font-medium">
|
||||
Page {index} of {maxIndex}
|
||||
Page {currentPage} of {maxIndex}
|
||||
</div>
|
||||
<div className="flex items-center space-x-2">
|
||||
<Button
|
||||
disabled={index <= 0}
|
||||
variant="outline"
|
||||
className="hidden h-8 w-8 p-0 lg:flex"
|
||||
onClick={() => {
|
||||
setPageIndex(1);
|
||||
paginate(size, 1);
|
||||
setPageIndex(0);
|
||||
setCurrentPage(1);
|
||||
paginate(size, 0);
|
||||
}}
|
||||
>
|
||||
<span className="sr-only">Go to first page</span>
|
||||
<IconComponent name="ChevronsLeft" className="h-4 w-4" />
|
||||
</Button>
|
||||
<Button
|
||||
disabled={index <= 0}
|
||||
onClick={() => {
|
||||
if (index <= 1) {
|
||||
setPageIndex(1);
|
||||
paginate(size, 1);
|
||||
} else {
|
||||
{
|
||||
setPageIndex(index - 1);
|
||||
paginate(size, index - 1);
|
||||
}
|
||||
if (index > 0) {
|
||||
const pgIndex = size - index;
|
||||
setCurrentPage(currentPage - 1);
|
||||
setPageIndex(pgIndex);
|
||||
paginate(size, pgIndex);
|
||||
}
|
||||
}}
|
||||
variant="outline"
|
||||
|
|
@ -84,14 +88,12 @@ export default function PaginatorComponent({
|
|||
<IconComponent name="ChevronLeft" className="h-4 w-4" />
|
||||
</Button>
|
||||
<Button
|
||||
disabled={currentPage === maxIndex}
|
||||
onClick={() => {
|
||||
if (index >= maxIndex) {
|
||||
setPageIndex(maxIndex);
|
||||
paginate(size, maxIndex);
|
||||
} else {
|
||||
setPageIndex(index + 1);
|
||||
paginate(size, index + 1);
|
||||
}
|
||||
const pgIndex = size + index;
|
||||
setPageIndex(pgIndex);
|
||||
setCurrentPage(currentPage + 1);
|
||||
paginate(size, pgIndex);
|
||||
}}
|
||||
variant="outline"
|
||||
className="h-8 w-8 p-0"
|
||||
|
|
@ -100,11 +102,13 @@ export default function PaginatorComponent({
|
|||
<IconComponent name="ChevronRight" className="h-4 w-4" />
|
||||
</Button>
|
||||
<Button
|
||||
disabled={currentPage === maxIndex}
|
||||
variant="outline"
|
||||
className="hidden h-8 w-8 p-0 lg:flex"
|
||||
onClick={() => {
|
||||
setPageIndex(maxIndex);
|
||||
paginate(size, maxIndex);
|
||||
setPageIndex(maxIndex - 1);
|
||||
setCurrentPage(maxIndex);
|
||||
paginate(size, size);
|
||||
}}
|
||||
>
|
||||
<span className="sr-only">Go to last page</span>
|
||||
|
|
|
|||
30
src/frontend/src/components/authAdminGuard/index.tsx
Normal file
30
src/frontend/src/components/authAdminGuard/index.tsx
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
import { useContext, useEffect } from "react";
|
||||
import { Navigate } from "react-router-dom";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
|
||||
export const ProtectedAdminRoute = ({ children }) => {
|
||||
const {
|
||||
isAdmin,
|
||||
isAuthenticated,
|
||||
logout,
|
||||
getAuthentication,
|
||||
userData,
|
||||
autoLogin,
|
||||
} = useContext(AuthContext);
|
||||
useEffect(() => {
|
||||
if (!isAuthenticated && !getAuthentication()) {
|
||||
window.location.replace("/login");
|
||||
logout();
|
||||
}
|
||||
}, [isAuthenticated, getAuthentication, logout, userData]);
|
||||
|
||||
if (!isAuthenticated && !getAuthentication()) {
|
||||
return <Navigate to="/login" replace />;
|
||||
}
|
||||
|
||||
if ((userData && !isAdmin) || autoLogin) {
|
||||
return <Navigate to="/" replace />;
|
||||
}
|
||||
|
||||
return children;
|
||||
};
|
||||
14
src/frontend/src/components/authGuard/index.tsx
Normal file
14
src/frontend/src/components/authGuard/index.tsx
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
import { useContext } from "react";
|
||||
import { Navigate } from "react-router-dom";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
|
||||
export const ProtectedRoute = ({ children }) => {
|
||||
const { isAuthenticated, logout, getAuthentication } =
|
||||
useContext(AuthContext);
|
||||
if (!isAuthenticated && !getAuthentication()) {
|
||||
logout();
|
||||
return <Navigate to="/login" replace />;
|
||||
}
|
||||
|
||||
return children;
|
||||
};
|
||||
19
src/frontend/src/components/authLoginGuard/index.tsx
Normal file
19
src/frontend/src/components/authLoginGuard/index.tsx
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import { useContext } from "react";
|
||||
import { Navigate } from "react-router-dom";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
|
||||
export const ProtectedLoginRoute = ({ children }) => {
|
||||
const { getAuthentication, autoLogin } = useContext(AuthContext);
|
||||
|
||||
if (autoLogin === true) {
|
||||
window.location.replace("/");
|
||||
return <Navigate to="/" replace />;
|
||||
}
|
||||
|
||||
if (getAuthentication()) {
|
||||
window.location.replace("/");
|
||||
return <Navigate to="/" replace />;
|
||||
}
|
||||
|
||||
return children;
|
||||
};
|
||||
13
src/frontend/src/components/catchAllRoutes/index.tsx
Normal file
13
src/frontend/src/components/catchAllRoutes/index.tsx
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
import { useEffect } from "react";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
|
||||
export const CatchAllRoute = () => {
|
||||
const navigate = useNavigate();
|
||||
|
||||
// Redirect to the root ("/") when the catch-all route is matched
|
||||
useEffect(() => {
|
||||
navigate("/");
|
||||
}, []);
|
||||
|
||||
return null;
|
||||
};
|
||||
|
|
@ -1,17 +1,19 @@
|
|||
import { forwardRef } from "react";
|
||||
import { IconComponentProps } from "../../types/components";
|
||||
import { nodeIconsLucide } from "../../utils/styleUtils";
|
||||
|
||||
export default function IconComponent({
|
||||
name,
|
||||
className,
|
||||
iconColor,
|
||||
}: IconComponentProps): JSX.Element {
|
||||
const TargetIcon = nodeIconsLucide[name] ?? nodeIconsLucide["unknown"];
|
||||
return (
|
||||
<TargetIcon
|
||||
className={className}
|
||||
style={{ color: iconColor }}
|
||||
stroke-width={1.5}
|
||||
/>
|
||||
);
|
||||
}
|
||||
const ForwardedIconComponent = forwardRef(
|
||||
({ name, className, iconColor }: IconComponentProps, ref) => {
|
||||
const TargetIcon = nodeIconsLucide[name] ?? nodeIconsLucide["unknown"];
|
||||
return (
|
||||
<TargetIcon
|
||||
strokeWidth={1.5}
|
||||
className={className}
|
||||
style={iconColor ? { color: iconColor } : {}}
|
||||
ref={ref}
|
||||
/>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
export default ForwardedIconComponent;
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
import { useContext, useEffect, useState } from "react";
|
||||
import { useContext } from "react";
|
||||
import { FaDiscord, FaGithub, FaTwitter } from "react-icons/fa";
|
||||
import { Link, useLocation } from "react-router-dom";
|
||||
import { Link, useLocation, useNavigate } from "react-router-dom";
|
||||
import AlertDropdown from "../../alerts/alertDropDown";
|
||||
import { USER_PROJECTS_HEADER } from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
import { darkContext } from "../../contexts/darkContext";
|
||||
import { TabsContext } from "../../contexts/tabsContext";
|
||||
import { getRepoStars } from "../../controllers/API";
|
||||
import IconComponent from "../genericIconComponent";
|
||||
import { Button } from "../ui/button";
|
||||
import { Separator } from "../ui/separator";
|
||||
|
|
@ -17,29 +17,54 @@ export default function Header(): JSX.Element {
|
|||
const { dark, setDark } = useContext(darkContext);
|
||||
const { notificationCenter } = useContext(alertContext);
|
||||
const location = useLocation();
|
||||
const { logout, autoLogin, isAdmin } = useContext(AuthContext);
|
||||
const { stars } = useContext(darkContext);
|
||||
const navigate = useNavigate();
|
||||
|
||||
const [stars, setStars] = useState(null);
|
||||
|
||||
// Get and set numbers of stars on header
|
||||
useEffect(() => {
|
||||
async function fetchStars() {
|
||||
const starsCount = await getRepoStars("logspace-ai", "langflow");
|
||||
setStars(starsCount);
|
||||
}
|
||||
fetchStars();
|
||||
}, []);
|
||||
return (
|
||||
<div className="header-arrangement">
|
||||
<div className="header-start-display">
|
||||
<Link to="/">
|
||||
<span className="ml-4 text-2xl">⛓️</span>
|
||||
</Link>
|
||||
<Button variant="outline" className="">
|
||||
Sign out
|
||||
</Button>
|
||||
|
||||
{flows.findIndex((f) => tabId === f.id) !== -1 && tabId !== "" && (
|
||||
<MenuBar flows={flows} tabId={tabId} />
|
||||
)}
|
||||
{!autoLogin && location.pathname !== `/flow/${tabId}` && (
|
||||
<a
|
||||
onClick={() => {
|
||||
logout();
|
||||
navigate("/login");
|
||||
}}
|
||||
className="text-sm font-medium text-muted-foreground transition-colors hover:text-primary cursor-pointer mx-5"
|
||||
>
|
||||
Sign out
|
||||
</a>
|
||||
)}
|
||||
|
||||
{location.pathname === "/admin" && (
|
||||
<a
|
||||
onClick={() => {
|
||||
navigate("/");
|
||||
}}
|
||||
className="text-sm font-medium text-muted-foreground transition-colors hover:text-primary cursor-pointer"
|
||||
>
|
||||
Home
|
||||
</a>
|
||||
)}
|
||||
|
||||
{isAdmin &&
|
||||
!autoLogin &&
|
||||
location.pathname !== "/admin" &&
|
||||
location.pathname !== `/flow/${tabId}` && (
|
||||
<a
|
||||
className="text-sm font-medium text-muted-foreground transition-colors hover:text-primary cursor-pointer"
|
||||
onClick={() => navigate("/admin")}
|
||||
>
|
||||
Admin page
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
<div className="round-button-div">
|
||||
<Link to="/">
|
||||
|
|
@ -119,6 +144,18 @@ export default function Header(): JSX.Element {
|
|||
/>
|
||||
</div>
|
||||
</AlertDropdown>
|
||||
{!autoLogin && (
|
||||
<button
|
||||
onClick={() => {
|
||||
navigate("/account/api-keys");
|
||||
}}
|
||||
>
|
||||
<IconComponent
|
||||
name="Key"
|
||||
className="side-bar-button-size text-muted-foreground hover:text-accent-foreground"
|
||||
/>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -81,7 +81,8 @@ export default function InputComponent({
|
|||
? "input-component-true-button"
|
||||
: "input-component-false-button"
|
||||
)}
|
||||
onClick={() => {
|
||||
onClick={(event) => {
|
||||
event.preventDefault();
|
||||
setPwdVisible(!pwdVisible);
|
||||
}}
|
||||
>
|
||||
|
|
|
|||
|
|
@ -508,6 +508,7 @@ export const URL_EXCLUDED_FROM_ERROR_RETRIES = [
|
|||
"/api/v1/validate/code",
|
||||
"/api/v1/custom_component",
|
||||
"/api/v1/validate/prompt",
|
||||
"http://localhost:7860/login",
|
||||
];
|
||||
|
||||
export const skipNodeUpdate = ["CustomComponent"];
|
||||
|
|
@ -522,6 +523,18 @@ export const CONTROL_LOGIN_STATE = {
|
|||
username: "",
|
||||
password: "",
|
||||
};
|
||||
|
||||
export const CONTROL_NEW_USER = {
|
||||
username: "",
|
||||
password: "",
|
||||
is_active: false,
|
||||
is_superuser: false,
|
||||
};
|
||||
|
||||
export const CONTROL_NEW_API_KEY = {
|
||||
apikeyname: "",
|
||||
};
|
||||
|
||||
export const tabsCode = [];
|
||||
|
||||
export function tabsArray(codes: string[], method: number) {
|
||||
|
|
@ -605,3 +618,21 @@ export function tabsArray(codes: string[], method: number) {
|
|||
export const FETCH_ERROR_MESSAGE = "Couldn't establish a connection.";
|
||||
export const FETCH_ERROR_DESCRIPION =
|
||||
"Check if everything is working properly and try again.";
|
||||
|
||||
export const BASE_URL_API = "/api/v1/";
|
||||
|
||||
export const SIGN_UP_SUCCESS = "Account created! Await admin activation. ";
|
||||
|
||||
export const API_PAGE_PARAGRAPH_1 =
|
||||
"Your secret API keys are listed below. Please note that we do not display your secret API keys again after you generate them.";
|
||||
|
||||
export const API_PAGE_PARAGRAPH_2 =
|
||||
"Do not share your API key with others, or expose it in the browser or other client-side code.";
|
||||
|
||||
export const API_PAGE_USER_KEYS =
|
||||
"This user does not have any keys assigned at the moment.";
|
||||
|
||||
export const LAST_USED_SPAN_1 = "The last time this key was used.";
|
||||
|
||||
export const LAST_USED_SPAN_2 =
|
||||
"Accurate to within the hour from the most recent usage.";
|
||||
|
|
|
|||
|
|
@ -1,74 +1,120 @@
|
|||
import { createContext, useEffect, useState } from "react";
|
||||
import { AuthContextType, userData } from "../types/contexts/auth";
|
||||
import { createContext, useContext, useEffect, useState } from "react";
|
||||
import Cookies from "universal-cookie";
|
||||
import { autoLogin as autoLoginApi, getLoggedUser } from "../controllers/API";
|
||||
import { Users } from "../types/api";
|
||||
import { AuthContextType } from "../types/contexts/auth";
|
||||
import { alertContext } from "./alertContext";
|
||||
|
||||
const initialValue: AuthContextType = {
|
||||
isAdmin: false,
|
||||
setIsAdmin: () => false,
|
||||
isAuthenticated: false,
|
||||
accessToken: null,
|
||||
refreshToken: null,
|
||||
login: () => {},
|
||||
logout: () => {},
|
||||
refreshAccessToken: () => Promise.resolve(),
|
||||
userData: null,
|
||||
setUserData: () => {},
|
||||
getAuthentication: () => false,
|
||||
authenticationErrorCount: 0,
|
||||
autoLogin: false,
|
||||
setAutoLogin: () => {},
|
||||
};
|
||||
|
||||
const AuthContext = createContext<AuthContextType>(initialValue);
|
||||
export const AuthContext = createContext<AuthContextType>(initialValue);
|
||||
|
||||
export function AuthProvider({ children }): React.ReactElement {
|
||||
const [accessToken, setAccessToken] = useState<string | null>(null);
|
||||
const [userData, setUserData] = useState<userData | null>(null);
|
||||
|
||||
const cookies = new Cookies();
|
||||
const [accessToken, setAccessToken] = useState<string | null>(
|
||||
cookies.get("access_token")
|
||||
);
|
||||
const [refreshToken, setRefreshToken] = useState<string | null>(
|
||||
cookies.get("refresh_token")
|
||||
);
|
||||
const [isAuthenticated, setIsAuthenticated] = useState<boolean>(false);
|
||||
const [isAdmin, setIsAdmin] = useState<boolean>(false);
|
||||
const [userData, setUserData] = useState<Users | null>(null);
|
||||
const [autoLogin, setAutoLogin] = useState<boolean>(false);
|
||||
const { setLoading } = useContext(alertContext);
|
||||
useEffect(() => {
|
||||
const storedAccessToken = localStorage.getItem("access_token");
|
||||
const storedAccessToken = cookies.get("access_token");
|
||||
if (storedAccessToken) {
|
||||
setAccessToken(storedAccessToken);
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const isLoginPage = location.pathname.includes("login");
|
||||
|
||||
autoLoginApi()
|
||||
.then((user) => {
|
||||
if (user && user["access_token"]) {
|
||||
user["refresh_token"] = "auto";
|
||||
login(user["access_token"], user["refresh_token"]);
|
||||
setUserData(user);
|
||||
setAutoLogin(true);
|
||||
setLoading(false);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
setAutoLogin(false);
|
||||
if (getAuthentication() && !isLoginPage) {
|
||||
getLoggedUser()
|
||||
.then((user) => {
|
||||
setUserData(user);
|
||||
setLoading(false);
|
||||
const isSuperUser = user.is_superuser;
|
||||
setIsAdmin(isSuperUser);
|
||||
})
|
||||
.catch((error) => {});
|
||||
} else {
|
||||
setLoading(false);
|
||||
}
|
||||
});
|
||||
}, []);
|
||||
|
||||
function getAuthentication() {
|
||||
const storedRefreshToken = cookies.get("refresh_token");
|
||||
const storedAccess = cookies.get("access_token");
|
||||
const auth = storedAccess && storedRefreshToken ? true : false;
|
||||
return auth;
|
||||
}
|
||||
|
||||
function login(newAccessToken: string, refreshToken: string) {
|
||||
localStorage.setItem("access_token", newAccessToken);
|
||||
cookies.set("access_token", newAccessToken, { path: "/" });
|
||||
cookies.set("refresh_token", refreshToken, { path: "/" });
|
||||
setAccessToken(newAccessToken);
|
||||
// Store refreshToken if needed
|
||||
setRefreshToken(refreshToken);
|
||||
setIsAuthenticated(true);
|
||||
}
|
||||
|
||||
function logout() {
|
||||
localStorage.removeItem("access_token");
|
||||
// Clear refreshToken if used
|
||||
cookies.remove("access_token", { path: "/" });
|
||||
cookies.remove("refresh_token", { path: "/" });
|
||||
setIsAdmin(false);
|
||||
setUserData(null);
|
||||
setAccessToken(null);
|
||||
}
|
||||
|
||||
async function refreshAccessToken(refreshToken: string) {
|
||||
try {
|
||||
// Call your API to refresh the access token using the refresh token
|
||||
const response = await fetch("/api/refresh-token", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ refreshToken }),
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
login(data.accessToken, refreshToken);
|
||||
} else {
|
||||
logout();
|
||||
}
|
||||
} catch (error) {
|
||||
logout();
|
||||
}
|
||||
setRefreshToken(null);
|
||||
setIsAuthenticated(false);
|
||||
}
|
||||
|
||||
return (
|
||||
// !! to convert string to boolean
|
||||
<AuthContext.Provider
|
||||
value={{
|
||||
isAdmin,
|
||||
setIsAdmin,
|
||||
isAuthenticated: !!accessToken,
|
||||
accessToken,
|
||||
refreshToken,
|
||||
login,
|
||||
logout,
|
||||
refreshAccessToken,
|
||||
setUserData,
|
||||
userData,
|
||||
getAuthentication,
|
||||
authenticationErrorCount: 0,
|
||||
setAutoLogin,
|
||||
autoLogin,
|
||||
}}
|
||||
>
|
||||
{children}
|
||||
|
|
|
|||
|
|
@ -1,9 +1,12 @@
|
|||
import { createContext, useEffect, useState } from "react";
|
||||
import { getRepoStars } from "../controllers/API";
|
||||
import { darkContextType } from "../types/typesContext";
|
||||
|
||||
const initialValue = {
|
||||
dark: {},
|
||||
setDark: () => {},
|
||||
stars: 0,
|
||||
setStars: (stars) => 0,
|
||||
};
|
||||
|
||||
export const darkContext = createContext<darkContextType>(initialValue);
|
||||
|
|
@ -12,6 +15,16 @@ export function DarkProvider({ children }) {
|
|||
const [dark, setDark] = useState(
|
||||
JSON.parse(window.localStorage.getItem("isDark")!) ?? false
|
||||
);
|
||||
const [stars, setStars] = useState<number>(0);
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchStars() {
|
||||
const starsCount = await getRepoStars("logspace-ai", "langflow");
|
||||
setStars(starsCount);
|
||||
}
|
||||
fetchStars();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (dark) {
|
||||
document.getElementById("body")!.classList.add("dark");
|
||||
|
|
@ -20,9 +33,12 @@ export function DarkProvider({ children }) {
|
|||
}
|
||||
window.localStorage.setItem("isDark", dark.toString());
|
||||
}, [dark]);
|
||||
|
||||
return (
|
||||
<darkContext.Provider
|
||||
value={{
|
||||
setStars,
|
||||
stars,
|
||||
dark,
|
||||
setDark,
|
||||
}}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,11 @@
|
|||
import { ReactNode } from "react";
|
||||
import { BrowserRouter } from "react-router-dom";
|
||||
import { ReactFlowProvider } from "reactflow";
|
||||
import { TooltipProvider } from "../components/ui/tooltip";
|
||||
import { ApiInterceptor } from "../controllers/API/api";
|
||||
import { SSEProvider } from "./SSEContext";
|
||||
import { AlertProvider } from "./alertContext";
|
||||
import { AuthProvider } from "./authContext";
|
||||
import { DarkProvider } from "./darkContext";
|
||||
import { LocationProvider } from "./locationContext";
|
||||
import { TabsProvider } from "./tabsContext";
|
||||
|
|
@ -13,23 +16,28 @@ export default function ContextWrapper({ children }: { children: ReactNode }) {
|
|||
//element to wrap all context
|
||||
return (
|
||||
<>
|
||||
<TooltipProvider>
|
||||
<ReactFlowProvider>
|
||||
<DarkProvider>
|
||||
<AlertProvider>
|
||||
<TypesProvider>
|
||||
<LocationProvider>
|
||||
<SSEProvider>
|
||||
<TabsProvider>
|
||||
<UndoRedoProvider>{children}</UndoRedoProvider>
|
||||
</TabsProvider>
|
||||
</SSEProvider>
|
||||
</LocationProvider>
|
||||
</TypesProvider>
|
||||
</AlertProvider>
|
||||
</DarkProvider>
|
||||
</ReactFlowProvider>
|
||||
</TooltipProvider>
|
||||
<BrowserRouter>
|
||||
<AlertProvider>
|
||||
<AuthProvider>
|
||||
<TooltipProvider>
|
||||
<ReactFlowProvider>
|
||||
<DarkProvider>
|
||||
<TypesProvider>
|
||||
<LocationProvider>
|
||||
<ApiInterceptor />
|
||||
<SSEProvider>
|
||||
<TabsProvider>
|
||||
<UndoRedoProvider>{children}</UndoRedoProvider>
|
||||
</TabsProvider>
|
||||
</SSEProvider>
|
||||
</LocationProvider>
|
||||
</TypesProvider>
|
||||
</DarkProvider>
|
||||
</ReactFlowProvider>
|
||||
</TooltipProvider>
|
||||
</AuthProvider>
|
||||
</AlertProvider>
|
||||
</BrowserRouter>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import { AxiosError } from "axios";
|
||||
import _ from "lodash";
|
||||
import {
|
||||
ReactNode,
|
||||
|
|
@ -21,7 +22,7 @@ import {
|
|||
import { APIClassType, APITemplateType } from "../types/api";
|
||||
import { tweakType } from "../types/components";
|
||||
import { FlowType, NodeDataType, NodeType } from "../types/flow";
|
||||
import { TabsContextType, TabsState, errorsVarType } from "../types/tabs";
|
||||
import { TabsContextType, TabsState } from "../types/tabs";
|
||||
import {
|
||||
addVersionToDuplicates,
|
||||
updateIds,
|
||||
|
|
@ -29,6 +30,7 @@ import {
|
|||
} from "../utils/reactflowUtils";
|
||||
import { getRandomDescription, getRandomName } from "../utils/utils";
|
||||
import { alertContext } from "./alertContext";
|
||||
import { AuthContext } from "./authContext";
|
||||
import { typesContext } from "./typesContext";
|
||||
|
||||
const uid = new ShortUniqueId({ length: 5 });
|
||||
|
|
@ -68,7 +70,9 @@ export const TabsContext = createContext<TabsContextType>(
|
|||
);
|
||||
|
||||
export function TabsProvider({ children }: { children: ReactNode }) {
|
||||
const { setErrorData, setNoticeData } = useContext(alertContext);
|
||||
const { setErrorData, setNoticeData, setSuccessData } =
|
||||
useContext(alertContext);
|
||||
const { getAuthentication } = useContext(AuthContext);
|
||||
|
||||
const [tabId, setTabId] = useState("");
|
||||
|
||||
|
|
@ -117,24 +121,26 @@ export function TabsProvider({ children }: { children: ReactNode }) {
|
|||
try {
|
||||
processDBData(DbData);
|
||||
updateStateWithDbData(DbData);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
// get data from db
|
||||
//get tabs locally saved
|
||||
// let tabsData = getLocalStorageTabsData();
|
||||
refreshFlows();
|
||||
}, [templates]);
|
||||
// If the user is authenticated, fetch the types. This code is important to check if the user is auth because of the execution order of the useEffect hooks.
|
||||
if (getAuthentication() === true) {
|
||||
// get data from db
|
||||
//get tabs locally saved
|
||||
// let tabsData = getLocalStorageTabsData();
|
||||
refreshFlows();
|
||||
}
|
||||
}, [templates, getAuthentication()]);
|
||||
|
||||
function getTabsDataFromDB() {
|
||||
//get tabs from db
|
||||
return readFlowsFromDatabase();
|
||||
}
|
||||
|
||||
function processDBData(DbData: FlowType[]) {
|
||||
DbData.forEach((flow: FlowType) => {
|
||||
try {
|
||||
|
|
@ -143,9 +149,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
|
|||
}
|
||||
processFlowEdges(flow);
|
||||
processFlowNodes(flow);
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
} catch (e) {}
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -478,7 +482,6 @@ export function TabsProvider({ children }: { children: ReactNode }) {
|
|||
return id;
|
||||
} catch (error) {
|
||||
// Handle the error if needed
|
||||
console.error("Error while adding flow:", error);
|
||||
throw error; // Re-throw the error so the caller can handle it if needed
|
||||
}
|
||||
} else {
|
||||
|
|
@ -579,6 +582,7 @@ export function TabsProvider({ children }: { children: ReactNode }) {
|
|||
const updatedFlow = await updateFlowInDatabase(newFlow);
|
||||
if (updatedFlow) {
|
||||
// updates flow in state
|
||||
setSuccessData({ title: "Changes saved successfully" });
|
||||
setFlows((prevState) => {
|
||||
const newFlows = [...prevState];
|
||||
const index = newFlows.findIndex((flow) => flow.id === newFlow.id);
|
||||
|
|
@ -601,7 +605,10 @@ export function TabsProvider({ children }: { children: ReactNode }) {
|
|||
});
|
||||
}
|
||||
} catch (err) {
|
||||
setErrorData(err as errorsVarType);
|
||||
setErrorData({
|
||||
title: "Error while saving changes",
|
||||
list: [(err as AxiosError).message],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ import { getAll, getHealth } from "../controllers/API";
|
|||
import { APIKindType } from "../types/api";
|
||||
import { typesContextType } from "../types/typesContext";
|
||||
import { alertContext } from "./alertContext";
|
||||
import { AuthContext } from "./authContext";
|
||||
|
||||
//context to share types adn functions from nodes to flow
|
||||
|
||||
|
|
@ -37,55 +38,56 @@ export function TypesProvider({ children }: { children: ReactNode }) {
|
|||
const [data, setData] = useState({});
|
||||
const [fetchError, setFetchError] = useState(false);
|
||||
const { setLoading } = useContext(alertContext);
|
||||
const { getAuthentication } = useContext(AuthContext);
|
||||
|
||||
useEffect(() => {
|
||||
// If the user is authenticated, fetch the types. This code is important to check if the user is auth because of the execution order of the useEffect hooks.
|
||||
if (getAuthentication() === true) {
|
||||
getTypes();
|
||||
}
|
||||
}, [getAuthentication()]);
|
||||
|
||||
async function getTypes(): Promise<void> {
|
||||
// We will keep a flag to handle the case where the component is unmounted before the API call resolves.
|
||||
let isMounted = true;
|
||||
|
||||
async function getTypes(): Promise<void> {
|
||||
try {
|
||||
const result = await getAll();
|
||||
// Make sure to only update the state if the component is still mounted.
|
||||
if (isMounted && result?.status === 200) {
|
||||
setLoading(false);
|
||||
setData(result.data);
|
||||
setTemplates(
|
||||
Object.keys(result.data).reduce((acc, curr) => {
|
||||
try {
|
||||
const result = await getAll();
|
||||
// Make sure to only update the state if the component is still mounted.
|
||||
if (isMounted && result?.status === 200) {
|
||||
setLoading(false);
|
||||
setData(result.data);
|
||||
setTemplates(
|
||||
Object.keys(result.data).reduce((acc, curr) => {
|
||||
Object.keys(result.data[curr]).forEach((c: keyof APIKindType) => {
|
||||
acc[c] = result.data[curr][c];
|
||||
});
|
||||
return acc;
|
||||
}, {})
|
||||
);
|
||||
// Set the types by reducing over the keys of the result data and updating the accumulator.
|
||||
setTypes(
|
||||
// Reverse the keys so the tool world does not overlap
|
||||
Object.keys(result.data)
|
||||
.reverse()
|
||||
.reduce((acc, curr) => {
|
||||
Object.keys(result.data[curr]).forEach((c: keyof APIKindType) => {
|
||||
acc[c] = result.data[curr][c];
|
||||
acc[c] = curr;
|
||||
// Add the base classes to the accumulator as well.
|
||||
result.data[curr][c].base_classes?.forEach((b) => {
|
||||
acc[b] = curr;
|
||||
});
|
||||
});
|
||||
return acc;
|
||||
}, {})
|
||||
);
|
||||
// Set the types by reducing over the keys of the result data and updating the accumulator.
|
||||
setTypes(
|
||||
// Reverse the keys so the tool world does not overlap
|
||||
Object.keys(result.data)
|
||||
.reverse()
|
||||
.reduce((acc, curr) => {
|
||||
Object.keys(result.data[curr]).forEach(
|
||||
(c: keyof APIKindType) => {
|
||||
acc[c] = curr;
|
||||
// Add the base classes to the accumulator as well.
|
||||
result.data[curr][c].base_classes?.forEach((b) => {
|
||||
acc[b] = curr;
|
||||
});
|
||||
}
|
||||
);
|
||||
return acc;
|
||||
}, {})
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("An error has occurred while fetching types.");
|
||||
await getHealth().catch((e) => {
|
||||
setFetchError(true);
|
||||
});
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("An error has occurred while fetching types.");
|
||||
await getHealth().catch((e) => {
|
||||
setFetchError(true);
|
||||
});
|
||||
}
|
||||
|
||||
getTypes();
|
||||
}, []);
|
||||
}
|
||||
|
||||
function deleteNode(idx: string) {
|
||||
reactFlowInstance!.setNodes(
|
||||
|
|
|
|||
|
|
@ -1,60 +1,117 @@
|
|||
import axios, { AxiosError, AxiosInstance } from "axios";
|
||||
import { useContext, useEffect, useRef } from "react";
|
||||
import { useContext, useEffect } from "react";
|
||||
import { Cookies } from "react-cookie";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { renewAccessToken } from ".";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
|
||||
// Create a new Axios instance
|
||||
const api: AxiosInstance = axios.create({
|
||||
baseURL: "",
|
||||
});
|
||||
|
||||
function ApiInterceptor(): null {
|
||||
const retryCounts = useRef([]);
|
||||
function ApiInterceptor() {
|
||||
const { setErrorData } = useContext(alertContext);
|
||||
let { accessToken, login, logout, authenticationErrorCount } =
|
||||
useContext(AuthContext);
|
||||
const navigate = useNavigate();
|
||||
const cookies = new Cookies();
|
||||
|
||||
useEffect(() => {
|
||||
const interceptor = api.interceptors.response.use(
|
||||
(response) => response,
|
||||
async (error: AxiosError) => {
|
||||
// if (URL_EXCLUDED_FROM_ERROR_RETRIES.includes(error.config?.url)) {
|
||||
// return Promise.reject(error);
|
||||
// }
|
||||
// let retryCount = 0;
|
||||
// while (retryCount < 4) {
|
||||
// await sleep(5000); // Sleep for 5 seconds
|
||||
// retryCount++;
|
||||
// try {
|
||||
// const response = await axios.request(error.config);
|
||||
// return response;
|
||||
// } catch (error) {
|
||||
// if (retryCount === 3) {
|
||||
// setErrorData({
|
||||
// title: "There was an error on web connection, please: ",
|
||||
// list: [
|
||||
// "Refresh the page",
|
||||
// "Use a new flow tab",
|
||||
// "Check if the backend is up",
|
||||
// "Endpoint: " + error.config?.url,
|
||||
// ],
|
||||
// });
|
||||
// return Promise.reject(error);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
if (error.response?.status === 401) {
|
||||
const refreshToken = cookies.get("refresh_token");
|
||||
if (refreshToken && refreshToken !== "auto") {
|
||||
authenticationErrorCount = authenticationErrorCount + 1;
|
||||
if (authenticationErrorCount > 3) {
|
||||
authenticationErrorCount = 0;
|
||||
logout();
|
||||
navigate("/login");
|
||||
}
|
||||
|
||||
const res = await renewAccessToken(refreshToken);
|
||||
login(res.data.access_token, res.data.refresh_token);
|
||||
try {
|
||||
if (error?.config?.headers) {
|
||||
delete error.config.headers["Authorization"];
|
||||
error.config.headers["Authorization"] = `Bearer ${accessToken}`;
|
||||
const response = await axios.request(error.config);
|
||||
return response;
|
||||
}
|
||||
} catch (error) {
|
||||
if (axios.isAxiosError(error) && error.response?.status === 401) {
|
||||
logout();
|
||||
navigate("/login");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!refreshToken && error?.config?.url?.includes("login")) {
|
||||
return Promise.reject(error);
|
||||
} else {
|
||||
logout();
|
||||
navigate("/login");
|
||||
}
|
||||
} else {
|
||||
// if (URL_EXCLUDED_FROM_ERROR_RETRIES.includes(error.config?.url)) {
|
||||
return Promise.reject(error);
|
||||
// }
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
const isAuthorizedURL = (url) => {
|
||||
const authorizedDomains = [
|
||||
"https://raw.githubusercontent.com/logspace-ai/langflow_examples/main/examples",
|
||||
"https://api.github.com/repos/logspace-ai/langflow_examples/contents/examples",
|
||||
"https://api.github.com/repos/logspace-ai/langflow",
|
||||
"auto_login",
|
||||
];
|
||||
|
||||
const authorizedEndpoints = ["auto_login"];
|
||||
|
||||
try {
|
||||
const parsedURL = new URL(url);
|
||||
|
||||
const isDomainAllowed = authorizedDomains.some(
|
||||
(domain) => parsedURL.origin === new URL(domain).origin
|
||||
);
|
||||
const isEndpointAllowed = authorizedEndpoints.some((endpoint) =>
|
||||
parsedURL.pathname.includes(endpoint)
|
||||
);
|
||||
|
||||
return isDomainAllowed || isEndpointAllowed;
|
||||
} catch (e) {
|
||||
// Invalid URL
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
// Request interceptor to add access token to every request
|
||||
const requestInterceptor = api.interceptors.request.use(
|
||||
(config) => {
|
||||
if (accessToken && !isAuthorizedURL(config?.url)) {
|
||||
config.headers["Authorization"] = `Bearer ${accessToken}`;
|
||||
}
|
||||
|
||||
return config;
|
||||
},
|
||||
(error) => {
|
||||
return Promise.reject(error);
|
||||
}
|
||||
);
|
||||
|
||||
return () => {
|
||||
// Clean up the interceptor when the component unmounts
|
||||
// Clean up the interceptors when the component unmounts
|
||||
api.interceptors.response.eject(interceptor);
|
||||
api.interceptors.request.eject(requestInterceptor);
|
||||
};
|
||||
}, [retryCounts]);
|
||||
}, [accessToken, setErrorData]);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Function to sleep for a given duration in milliseconds
|
||||
function sleep(ms: number) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
export { ApiInterceptor, api };
|
||||
|
|
|
|||
|
|
@ -1,7 +1,14 @@
|
|||
import { AxiosResponse } from "axios";
|
||||
import { ReactFlowJsonObject } from "reactflow";
|
||||
import { BASE_URL_API } from "../../constants/constants";
|
||||
import { api } from "../../controllers/API/api";
|
||||
import { APIObjectType, sendAllProps } from "../../types/api/index";
|
||||
import {
|
||||
APIObjectType,
|
||||
LoginType,
|
||||
Users,
|
||||
sendAllProps,
|
||||
} from "../../types/api/index";
|
||||
import { UserInputType } from "../../types/components";
|
||||
import { FlowStyleType, FlowType } from "../../types/flow";
|
||||
import {
|
||||
APIClassType,
|
||||
|
|
@ -18,7 +25,7 @@ import {
|
|||
* @returns {Promise<AxiosResponse<APIObjectType>>} A promise that resolves to an AxiosResponse containing all the objects.
|
||||
*/
|
||||
export async function getAll(): Promise<AxiosResponse<APIObjectType>> {
|
||||
return await api.get(`/api/v1/all`);
|
||||
return await api.get(`${BASE_URL_API}all`);
|
||||
}
|
||||
|
||||
const GITHUB_API_URL = "https://api.github.com";
|
||||
|
|
@ -40,13 +47,13 @@ export async function getRepoStars(owner: string, repo: string) {
|
|||
* @returns {AxiosResponse<any>} The API response.
|
||||
*/
|
||||
export async function sendAll(data: sendAllProps) {
|
||||
return await api.post(`/api/v1/predict`, data);
|
||||
return await api.post(`${BASE_URL_API}predict`, data);
|
||||
}
|
||||
|
||||
export async function postValidateCode(
|
||||
code: string
|
||||
): Promise<AxiosResponse<errorsTypeAPI>> {
|
||||
return await api.post("/api/v1/validate/code", { code });
|
||||
return await api.post(`${BASE_URL_API}validate/code`, { code });
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -61,7 +68,7 @@ export async function postValidatePrompt(
|
|||
template: string,
|
||||
frontend_node: APIClassType
|
||||
): Promise<AxiosResponse<PromptTypeAPI>> {
|
||||
return await api.post("/api/v1/validate/prompt", {
|
||||
return await api.post(`${BASE_URL_API}validate/prompt`, {
|
||||
name: name,
|
||||
template: template,
|
||||
frontend_node: frontend_node,
|
||||
|
|
@ -105,7 +112,7 @@ export async function saveFlowToDatabase(newFlow: {
|
|||
style?: FlowStyleType;
|
||||
}): Promise<FlowType> {
|
||||
try {
|
||||
const response = await api.post("/api/v1/flows/", {
|
||||
const response = await api.post(`${BASE_URL_API}flows/`, {
|
||||
name: newFlow.name,
|
||||
data: newFlow.data,
|
||||
description: newFlow.description,
|
||||
|
|
@ -131,7 +138,7 @@ export async function updateFlowInDatabase(
|
|||
updatedFlow: FlowType
|
||||
): Promise<FlowType> {
|
||||
try {
|
||||
const response = await api.patch(`/api/v1/flows/${updatedFlow.id}`, {
|
||||
const response = await api.patch(`${BASE_URL_API}flows/${updatedFlow.id}`, {
|
||||
name: updatedFlow.name,
|
||||
data: updatedFlow.data,
|
||||
description: updatedFlow.description,
|
||||
|
|
@ -155,8 +162,8 @@ export async function updateFlowInDatabase(
|
|||
*/
|
||||
export async function readFlowsFromDatabase() {
|
||||
try {
|
||||
const response = await api.get("/api/v1/flows/");
|
||||
if (response.status !== 200) {
|
||||
const response = await api.get(`${BASE_URL_API}flows/`);
|
||||
if (response?.status !== 200) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
return response.data;
|
||||
|
|
@ -168,8 +175,8 @@ export async function readFlowsFromDatabase() {
|
|||
|
||||
export async function downloadFlowsFromDatabase() {
|
||||
try {
|
||||
const response = await api.get("/api/v1/flows/download/");
|
||||
if (response.status !== 200) {
|
||||
const response = await api.get(`${BASE_URL_API}flows/download/`);
|
||||
if (response?.status !== 200) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
return response.data;
|
||||
|
|
@ -181,7 +188,7 @@ export async function downloadFlowsFromDatabase() {
|
|||
|
||||
export async function uploadFlowsToDatabase(flows: FormData) {
|
||||
try {
|
||||
const response = await api.post(`/api/v1/flows/upload/`, flows);
|
||||
const response = await api.post(`${BASE_URL_API}flows/upload/`, flows);
|
||||
|
||||
if (response.status !== 201) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
|
|
@ -202,7 +209,7 @@ export async function uploadFlowsToDatabase(flows: FormData) {
|
|||
*/
|
||||
export async function deleteFlowFromDatabase(flowId: string) {
|
||||
try {
|
||||
const response = await api.delete(`/api/v1/flows/${flowId}`);
|
||||
const response = await api.delete(`${BASE_URL_API}flows/${flowId}`);
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
|
@ -222,7 +229,7 @@ export async function deleteFlowFromDatabase(flowId: string) {
|
|||
*/
|
||||
export async function getFlowFromDatabase(flowId: number) {
|
||||
try {
|
||||
const response = await api.get(`/api/v1/flows/${flowId}`);
|
||||
const response = await api.get(`${BASE_URL_API}flows/${flowId}`);
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
|
@ -241,7 +248,7 @@ export async function getFlowFromDatabase(flowId: number) {
|
|||
*/
|
||||
export async function getFlowStylesFromDatabase() {
|
||||
try {
|
||||
const response = await api.get("/api/v1/flow_styles/");
|
||||
const response = await api.get(`${BASE_URL_API}flow_styles/`);
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`HTTP error! status: ${response.status}`);
|
||||
}
|
||||
|
|
@ -261,7 +268,7 @@ export async function getFlowStylesFromDatabase() {
|
|||
*/
|
||||
export async function saveFlowStyleToDatabase(flowStyle: FlowStyleType) {
|
||||
try {
|
||||
const response = await api.post("/api/v1/flow_styles/", flowStyle, {
|
||||
const response = await api.post(`${BASE_URL_API}flow_styles/`, flowStyle, {
|
||||
headers: {
|
||||
accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
|
|
@ -284,7 +291,7 @@ export async function saveFlowStyleToDatabase(flowStyle: FlowStyleType) {
|
|||
* @returns {Promise<AxiosResponse<any>>} A promise that resolves to an AxiosResponse containing the version information.
|
||||
*/
|
||||
export async function getVersion() {
|
||||
const respnose = await api.get("/api/v1/version");
|
||||
const respnose = await api.get(`${BASE_URL_API}version`);
|
||||
return respnose.data;
|
||||
}
|
||||
|
||||
|
|
@ -306,7 +313,7 @@ export async function getHealth() {
|
|||
export async function getBuildStatus(
|
||||
flowId: string
|
||||
): Promise<BuildStatusTypeAPI> {
|
||||
return await api.get(`/api/v1/build/${flowId}/status`);
|
||||
return await api.get(`${BASE_URL_API}build/${flowId}/status`);
|
||||
}
|
||||
|
||||
//docs for postbuildinit
|
||||
|
|
@ -319,7 +326,7 @@ export async function getBuildStatus(
|
|||
export async function postBuildInit(
|
||||
flow: FlowType
|
||||
): Promise<AxiosResponse<InitTypeAPI>> {
|
||||
return await api.post(`/api/v1/build/init/${flow.id}`, flow);
|
||||
return await api.post(`${BASE_URL_API}build/init/${flow.id}`, flow);
|
||||
}
|
||||
|
||||
// fetch(`/upload/${id}`, {
|
||||
|
|
@ -337,12 +344,160 @@ export async function uploadFile(
|
|||
): Promise<AxiosResponse<UploadFileTypeAPI>> {
|
||||
const formData = new FormData();
|
||||
formData.append("file", file);
|
||||
return await api.post(`/api/v1/upload/${id}`, formData);
|
||||
return await api.post(`${BASE_URL_API}upload/${id}`, formData);
|
||||
}
|
||||
|
||||
export async function postCustomComponent(
|
||||
code: string,
|
||||
apiClass: APIClassType
|
||||
): Promise<AxiosResponse<APIClassType>> {
|
||||
return await api.post(`/api/v1/custom_component`, { code });
|
||||
return await api.post(`${BASE_URL_API}custom_component`, { code });
|
||||
}
|
||||
|
||||
export async function onLogin(user: LoginType) {
|
||||
try {
|
||||
const response = await api.post(
|
||||
`${BASE_URL_API}login`,
|
||||
new URLSearchParams({
|
||||
username: user.username,
|
||||
password: user.password,
|
||||
}).toString(),
|
||||
{
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
if (response.status === 200) {
|
||||
const data = response.data;
|
||||
return data;
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function autoLogin() {
|
||||
try {
|
||||
const response = await api.get(`${BASE_URL_API}auto_login`);
|
||||
|
||||
if (response.status === 200) {
|
||||
const data = response.data;
|
||||
return data;
|
||||
}
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function renewAccessToken(token: string) {
|
||||
try {
|
||||
return await api.post(`${BASE_URL_API}refresh?token=${token}`);
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getLoggedUser(): Promise<Users> {
|
||||
try {
|
||||
const res = await api.get(`${BASE_URL_API}user`);
|
||||
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function addUser(user: UserInputType): Promise<Users> {
|
||||
try {
|
||||
const res = await api.post(`${BASE_URL_API}user`, user);
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getUsersPage(
|
||||
skip: number,
|
||||
limit: number
|
||||
): Promise<[Users]> {
|
||||
try {
|
||||
const res = await api.get(
|
||||
`${BASE_URL_API}users?skip=${skip}&limit=${limit}`
|
||||
);
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteUser(user_id: string) {
|
||||
try {
|
||||
const res = await api.delete(`${BASE_URL_API}user/${user_id}`);
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function updateUser(user_id: string, user: Users) {
|
||||
try {
|
||||
const res = await api.patch(`${BASE_URL_API}user/${user_id}`, user);
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getApiKey() {
|
||||
try {
|
||||
const res = await api.get(`${BASE_URL_API}api_key`);
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function createApiKey(name: string) {
|
||||
try {
|
||||
const res = await api.post(`${BASE_URL_API}api_key`, { name });
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export async function deleteApiKey(api_key: string) {
|
||||
try {
|
||||
const res = await api.delete(`${BASE_URL_API}api_key/${api_key}`);
|
||||
if (res.status === 200) {
|
||||
return res.data;
|
||||
}
|
||||
} catch (error) {
|
||||
console.log("Error:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,10 +1,8 @@
|
|||
import ReactDOM from "react-dom/client";
|
||||
import { BrowserRouter } from "react-router-dom";
|
||||
import App from "./App";
|
||||
import ContextWrapper from "./contexts";
|
||||
import reportWebVitals from "./reportWebVitals";
|
||||
|
||||
import { ApiInterceptor } from "./controllers/API/api";
|
||||
// @ts-ignore
|
||||
import "./style/index.css";
|
||||
// @ts-ignore
|
||||
|
|
@ -17,10 +15,7 @@ const root = ReactDOM.createRoot(
|
|||
);
|
||||
root.render(
|
||||
<ContextWrapper>
|
||||
<BrowserRouter>
|
||||
<App />
|
||||
<ApiInterceptor />
|
||||
</BrowserRouter>
|
||||
<App />
|
||||
</ContextWrapper>
|
||||
);
|
||||
reportWebVitals();
|
||||
|
|
|
|||
202
src/frontend/src/modals/SecretKeyModal/index.tsx
Normal file
202
src/frontend/src/modals/SecretKeyModal/index.tsx
Normal file
|
|
@ -0,0 +1,202 @@
|
|||
import * as Form from "@radix-ui/react-form";
|
||||
import { useContext, useEffect, useRef, useState } from "react";
|
||||
import IconComponent from "../../components/genericIconComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { Input } from "../../components/ui/input";
|
||||
import { CONTROL_NEW_API_KEY } from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { createApiKey } from "../../controllers/API";
|
||||
import {
|
||||
ApiKeyInputType,
|
||||
ApiKeyType,
|
||||
inputHandlerEventType,
|
||||
} from "../../types/components";
|
||||
import { nodeIconsLucide } from "../../utils/styleUtils";
|
||||
import BaseModal from "../baseModal";
|
||||
|
||||
export default function SecretKeyModal({
|
||||
title,
|
||||
cancelText,
|
||||
confirmationText,
|
||||
children,
|
||||
icon,
|
||||
data,
|
||||
onCloseModal,
|
||||
}: ApiKeyType) {
|
||||
const Icon: any = nodeIconsLucide[icon];
|
||||
const [open, setOpen] = useState(false);
|
||||
const [apiKeyName, setApiKeyName] = useState(data?.apikeyname ?? "");
|
||||
const [apiKeyValue, setApiKeyValue] = useState("");
|
||||
const [inputState, setInputState] =
|
||||
useState<ApiKeyInputType>(CONTROL_NEW_API_KEY);
|
||||
const [renderKey, setRenderKey] = useState(false);
|
||||
const [textCopied, setTextCopied] = useState(true);
|
||||
const { setSuccessData } = useContext(alertContext);
|
||||
const inputRef = useRef<HTMLInputElement | null>(null);
|
||||
|
||||
function handleInput({
|
||||
target: { name, value },
|
||||
}: inputHandlerEventType): void {
|
||||
setInputState((prev) => ({ ...prev, [name]: value }));
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (open) {
|
||||
setRenderKey(false);
|
||||
resetForm();
|
||||
} else {
|
||||
onCloseModal();
|
||||
}
|
||||
}, [open]);
|
||||
|
||||
function resetForm() {
|
||||
setApiKeyName("");
|
||||
setApiKeyValue("");
|
||||
}
|
||||
|
||||
const handleCopyClick = async () => {
|
||||
if (apiKeyValue) {
|
||||
await navigator.clipboard.writeText(apiKeyValue);
|
||||
inputRef?.current?.focus();
|
||||
inputRef?.current?.select();
|
||||
setSuccessData({
|
||||
title: "API Key copied!",
|
||||
});
|
||||
setTextCopied(false);
|
||||
|
||||
setTimeout(() => {
|
||||
setTextCopied(true);
|
||||
}, 3000);
|
||||
}
|
||||
};
|
||||
|
||||
function handleAddNewKey() {
|
||||
createApiKey(apiKeyName)
|
||||
.then((res) => {
|
||||
setApiKeyValue(res["api_key"]);
|
||||
})
|
||||
.catch((err) => {});
|
||||
}
|
||||
|
||||
return (
|
||||
<BaseModal size="small-h-full" open={open} setOpen={setOpen}>
|
||||
<BaseModal.Trigger>{children}</BaseModal.Trigger>
|
||||
<BaseModal.Header description={""}>
|
||||
<span className="pr-2">{title}</span>
|
||||
<Icon
|
||||
name="icon"
|
||||
className="h-6 w-6 pl-1 text-foreground"
|
||||
aria-hidden="true"
|
||||
/>
|
||||
</BaseModal.Header>
|
||||
<BaseModal.Content>
|
||||
{renderKey === true && (
|
||||
<>
|
||||
<span className="text-xs">
|
||||
Please save this secret key somewhere safe and accessible. For
|
||||
security reasons,{" "}
|
||||
<strong>you won't be able to view it again</strong> through your
|
||||
account. If you lose this secret key, you'll need to generate a
|
||||
new one.
|
||||
</span>
|
||||
<div className="flex pt-3">
|
||||
<div className="w-full">
|
||||
<Input
|
||||
ref={inputRef}
|
||||
onChange={(event) => {
|
||||
setApiKeyValue(event.target.value);
|
||||
}}
|
||||
readOnly={true}
|
||||
value={apiKeyValue}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<Button
|
||||
className="ml-3"
|
||||
onClick={() => {
|
||||
handleCopyClick();
|
||||
}}
|
||||
>
|
||||
{textCopied ? (
|
||||
<IconComponent name="Copy" className="h-4 w-4" />
|
||||
) : (
|
||||
<IconComponent name="Check" className="h-4 w-4" />
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
<Form.Root
|
||||
onSubmit={(event) => {
|
||||
setRenderKey(true);
|
||||
handleAddNewKey();
|
||||
event.preventDefault();
|
||||
}}
|
||||
>
|
||||
{renderKey === false && (
|
||||
<div className="grid gap-5">
|
||||
<Form.Field name="username">
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "baseline",
|
||||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<Form.Label className="data-[invalid]:label-invalid">
|
||||
Name (optional){" "}
|
||||
</Form.Label>
|
||||
</div>
|
||||
<Form.Control asChild>
|
||||
<input
|
||||
onChange={({ target: { value } }) => {
|
||||
handleInput({ target: { name: "apikeyname", value } });
|
||||
setApiKeyName(value);
|
||||
}}
|
||||
value={apiKeyName}
|
||||
className="primary-input"
|
||||
placeholder="My key name"
|
||||
/>
|
||||
</Form.Control>
|
||||
</Form.Field>
|
||||
</div>
|
||||
)}
|
||||
{renderKey === false && (
|
||||
<div className="float-right">
|
||||
<Button
|
||||
className="mr-3"
|
||||
variant="outline"
|
||||
onClick={() => {
|
||||
setOpen(false);
|
||||
}}
|
||||
>
|
||||
{cancelText}
|
||||
</Button>
|
||||
|
||||
<Form.Submit asChild>
|
||||
<Button className="mt-8">{confirmationText}</Button>
|
||||
</Form.Submit>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{renderKey === true && (
|
||||
<div className="float-right">
|
||||
<Button
|
||||
onClick={() => {
|
||||
setOpen(false);
|
||||
setRenderKey(false);
|
||||
}}
|
||||
className="mt-8"
|
||||
>
|
||||
Done
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</Form.Root>
|
||||
</BaseModal.Content>
|
||||
</BaseModal>
|
||||
);
|
||||
}
|
||||
|
|
@ -1,8 +1,15 @@
|
|||
import * as Form from "@radix-ui/react-form";
|
||||
import { useEffect, useState } from "react";
|
||||
import InputComponent from "../../components/inputComponent";
|
||||
import { Eye, EyeOff } from "lucide-react";
|
||||
import { useContext, useEffect, useState } from "react";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { UserManagementType } from "../../types/components";
|
||||
import { Checkbox } from "../../components/ui/checkbox";
|
||||
import { CONTROL_NEW_USER } from "../../constants/constants";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
import {
|
||||
UserInputType,
|
||||
UserManagementType,
|
||||
inputHandlerEventType,
|
||||
} from "../../types/components";
|
||||
import { nodeIconsLucide } from "../../utils/styleUtils";
|
||||
import BaseModal from "../baseModal";
|
||||
|
||||
|
|
@ -18,18 +25,32 @@ export default function UserManagementModal({
|
|||
onConfirm,
|
||||
}: UserManagementType) {
|
||||
const Icon: any = nodeIconsLucide[icon];
|
||||
|
||||
const [pwdVisible, setPwdVisible] = useState(false);
|
||||
const [confirmPwdVisible, setConfirmPwdVisible] = useState(false);
|
||||
const [open, setOpen] = useState(false);
|
||||
|
||||
const [password, setPassword] = useState(data?.password ?? "");
|
||||
const [username, setUserName] = useState(data?.user ?? "");
|
||||
const [username, setUserName] = useState(data?.username ?? "");
|
||||
const [confirmPassword, setConfirmPassword] = useState(data?.password ?? "");
|
||||
const [isActive, setIsActive] = useState(data?.is_active ?? false);
|
||||
const [isSuperUser, setIsSuperUser] = useState(data?.is_superuser ?? false);
|
||||
const [inputState, setInputState] = useState<UserInputType>(CONTROL_NEW_USER);
|
||||
const { userData } = useContext(AuthContext);
|
||||
|
||||
function handleInput({
|
||||
target: { name, value },
|
||||
}: inputHandlerEventType): void {
|
||||
setInputState((prev) => ({ ...prev, [name]: value }));
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (!data) {
|
||||
resetForm();
|
||||
} else {
|
||||
handleInput({ target: { name: "username", value: username } });
|
||||
handleInput({ target: { name: "is_active", value: isActive } });
|
||||
handleInput({ target: { name: "is_superuser", value: isSuperUser } });
|
||||
}
|
||||
}, [data, open]);
|
||||
}, [open]);
|
||||
|
||||
function resetForm() {
|
||||
setPassword("");
|
||||
|
|
@ -55,10 +76,8 @@ export default function UserManagementModal({
|
|||
event.preventDefault();
|
||||
return;
|
||||
}
|
||||
|
||||
const data = Object.fromEntries(new FormData(event.currentTarget));
|
||||
resetForm();
|
||||
onConfirm(index ?? -1, data);
|
||||
onConfirm(1, inputState);
|
||||
setOpen(false);
|
||||
event.preventDefault();
|
||||
}}
|
||||
|
|
@ -79,8 +98,9 @@ export default function UserManagementModal({
|
|||
</div>
|
||||
<Form.Control asChild>
|
||||
<input
|
||||
onChange={(input) => {
|
||||
setUserName(input.target.value);
|
||||
onChange={({ target: { value } }) => {
|
||||
handleInput({ target: { name: "username", value } });
|
||||
setUserName(value);
|
||||
}}
|
||||
value={username}
|
||||
className="primary-input"
|
||||
|
|
@ -106,22 +126,40 @@ export default function UserManagementModal({
|
|||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<Form.Label className="data-[invalid]:label-invalid">
|
||||
<Form.Label className="data-[invalid]:label-invalid flex">
|
||||
Password{" "}
|
||||
<span className="font-medium text-destructive">*</span>
|
||||
<span className="ml-1 mr-1 font-medium text-destructive">
|
||||
*
|
||||
</span>
|
||||
{pwdVisible && (
|
||||
<Eye
|
||||
onClick={() => setPwdVisible(!pwdVisible)}
|
||||
className="h-5 cursor-pointer"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
)}
|
||||
{!pwdVisible && (
|
||||
<EyeOff
|
||||
onClick={() => setPwdVisible(!pwdVisible)}
|
||||
className="h-5 cursor-pointer"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
)}
|
||||
</Form.Label>
|
||||
</div>
|
||||
<InputComponent
|
||||
onChange={(input) => {
|
||||
setPassword(input);
|
||||
}}
|
||||
value={password}
|
||||
password={true}
|
||||
isForm
|
||||
className="primary-input"
|
||||
required
|
||||
placeholder="Password"
|
||||
/>
|
||||
<Form.Control asChild>
|
||||
<input
|
||||
onChange={({ target: { value } }) => {
|
||||
handleInput({ target: { name: "password", value } });
|
||||
setPassword(value);
|
||||
}}
|
||||
value={password}
|
||||
className="primary-input"
|
||||
required={data ? false : true}
|
||||
type={pwdVisible ? "text" : "password"}
|
||||
/>
|
||||
</Form.Control>
|
||||
|
||||
<Form.Message className="field-invalid" match="valueMissing">
|
||||
Please enter a password
|
||||
</Form.Message>
|
||||
|
|
@ -146,93 +184,108 @@ export default function UserManagementModal({
|
|||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<Form.Label className="data-[invalid]:label-invalid">
|
||||
<Form.Label className="data-[invalid]:label-invalid flex">
|
||||
Confirm password{" "}
|
||||
<span className="font-medium text-destructive">*</span>
|
||||
<span className="ml-1 mr-1 font-medium text-destructive">
|
||||
*
|
||||
</span>
|
||||
{confirmPwdVisible && (
|
||||
<Eye
|
||||
onClick={() =>
|
||||
setConfirmPwdVisible(!confirmPwdVisible)
|
||||
}
|
||||
className="h-5 cursor-pointer"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
)}
|
||||
{!confirmPwdVisible && (
|
||||
<EyeOff
|
||||
onClick={() =>
|
||||
setConfirmPwdVisible(!confirmPwdVisible)
|
||||
}
|
||||
className="h-5 cursor-pointer"
|
||||
strokeWidth={1.5}
|
||||
/>
|
||||
)}
|
||||
</Form.Label>
|
||||
</div>
|
||||
<InputComponent
|
||||
onChange={(input) => {
|
||||
setConfirmPassword(input);
|
||||
}}
|
||||
value={confirmPassword}
|
||||
password={true}
|
||||
isForm
|
||||
className="primary-input"
|
||||
required
|
||||
placeholder="Confirm your password"
|
||||
/>
|
||||
<Form.Control asChild>
|
||||
<input
|
||||
onChange={(input) => {
|
||||
setConfirmPassword(input.target.value);
|
||||
}}
|
||||
value={confirmPassword}
|
||||
className="primary-input"
|
||||
required={data ? false : true}
|
||||
type={confirmPwdVisible ? "text" : "password"}
|
||||
/>
|
||||
</Form.Control>
|
||||
<Form.Message className="field-invalid" match="valueMissing">
|
||||
Please confirm your password
|
||||
</Form.Message>
|
||||
</Form.Field>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/*
|
||||
<Form.Field name="email">
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "baseline",
|
||||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<Form.Label className="data-[invalid]:label-invalid">
|
||||
Email <span className="font-medium text-destructive">*</span>
|
||||
</Form.Label>
|
||||
<Form.Message className="field-invalid" match="valueMissing">
|
||||
Please enter your email
|
||||
</Form.Message>
|
||||
<Form.Message className="field-invalid" match="typeMismatch">
|
||||
Please provide a valid email
|
||||
</Form.Message>
|
||||
</div>
|
||||
<Form.Control asChild>
|
||||
<input className="primary-input" type="email" required />
|
||||
</Form.Control>
|
||||
</Form.Field> */}
|
||||
|
||||
{/*
|
||||
<Form.Field name="birth">
|
||||
<div
|
||||
style={{
|
||||
display: "flex",
|
||||
alignItems: "baseline",
|
||||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<Form.Label className="data-[invalid]:label-invalid">
|
||||
Date of birth{" "}
|
||||
<span className="font-medium text-destructive">*</span>
|
||||
</Form.Label>
|
||||
<Form.Message className="field-invalid" match="valueMissing">
|
||||
Please enter your date of birth
|
||||
</Form.Message>
|
||||
</div>
|
||||
<Form.Control asChild>
|
||||
<input
|
||||
type="date"
|
||||
className="primary-input"
|
||||
required
|
||||
max={new Date().toISOString().split("T")[0]}
|
||||
/>
|
||||
</Form.Control>
|
||||
</Form.Field> */}
|
||||
<div className="flex gap-8">
|
||||
<Form.Field name="is_active">
|
||||
<div>
|
||||
<Form.Label className="data-[invalid]:label-invalid mr-3">
|
||||
Active
|
||||
</Form.Label>
|
||||
<Form.Control asChild>
|
||||
<Checkbox
|
||||
value={isActive}
|
||||
checked={isActive}
|
||||
id="is_active"
|
||||
className="relative top-0.5"
|
||||
onCheckedChange={(value) => {
|
||||
handleInput({ target: { name: "is_active", value } });
|
||||
setIsActive(value);
|
||||
}}
|
||||
/>
|
||||
</Form.Control>
|
||||
</div>
|
||||
</Form.Field>
|
||||
{userData?.is_superuser && (
|
||||
<Form.Field name="is_superuser">
|
||||
<div>
|
||||
<Form.Label className="data-[invalid]:label-invalid mr-3">
|
||||
Superuser
|
||||
</Form.Label>
|
||||
<Form.Control asChild>
|
||||
<Checkbox
|
||||
checked={isSuperUser}
|
||||
value={isSuperUser}
|
||||
id="is_superuser"
|
||||
className="relative top-0.5"
|
||||
onCheckedChange={(value) => {
|
||||
handleInput({
|
||||
target: { name: "is_superuser", value },
|
||||
});
|
||||
setIsSuperUser(value);
|
||||
}}
|
||||
/>
|
||||
</Form.Control>
|
||||
</div>
|
||||
</Form.Field>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="float-right">
|
||||
<Form.Submit asChild>
|
||||
<Button className="mr-3 mt-8">{confirmationText}</Button>
|
||||
</Form.Submit>
|
||||
<Button
|
||||
variant="outline"
|
||||
onClick={() => {
|
||||
setOpen(false);
|
||||
}}
|
||||
className="mr-3"
|
||||
>
|
||||
{cancelText}
|
||||
</Button>
|
||||
|
||||
<Form.Submit asChild>
|
||||
<Button className="mt-8">{confirmationText}</Button>
|
||||
</Form.Submit>
|
||||
</div>
|
||||
</Form.Root>
|
||||
</BaseModal.Content>
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import EditFlowSettings from "../../components/EditFlowSettingsComponent";
|
|||
import IconComponent from "../../components/genericIconComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { SETTINGS_DIALOG_SUBTITLE } from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { TabsContext } from "../../contexts/tabsContext";
|
||||
import { FlowSettingsPropsType } from "../../types/components";
|
||||
import BaseModal from "../baseModal";
|
||||
|
|
@ -12,15 +11,14 @@ export default function FlowSettingsModal({
|
|||
open,
|
||||
setOpen,
|
||||
}: FlowSettingsPropsType): JSX.Element {
|
||||
const { setSuccessData } = useContext(alertContext);
|
||||
const { flows, tabId, updateFlow, saveFlow } = useContext(TabsContext);
|
||||
const flow = flows.find((f) => f.id === tabId);
|
||||
useEffect(() => {
|
||||
setName(flow.name);
|
||||
setDescription(flow.description);
|
||||
}, [flow.name, flow.description]);
|
||||
const [name, setName] = useState(flow.name);
|
||||
const [description, setDescription] = useState(flow.description);
|
||||
setName(flow!.name);
|
||||
setDescription(flow!.description);
|
||||
}, [flow!.name, flow!.description]);
|
||||
const [name, setName] = useState(flow!.name);
|
||||
const [description, setDescription] = useState(flow!.description);
|
||||
const [invalidName, setInvalidName] = useState(false);
|
||||
|
||||
function handleClick(): void {
|
||||
|
|
@ -28,7 +26,6 @@ export default function FlowSettingsModal({
|
|||
savedFlow!.name = name;
|
||||
savedFlow!.description = description;
|
||||
saveFlow(savedFlow!);
|
||||
setSuccessData({ title: "Changes saved successfully" });
|
||||
setOpen(false);
|
||||
}
|
||||
return (
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ import {
|
|||
} from "../../components/ui/dialog";
|
||||
import { Textarea } from "../../components/ui/textarea";
|
||||
import { CHAT_FORM_DIALOG_SUBTITLE } from "../../constants/constants";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
import { TabsContext } from "../../contexts/tabsContext";
|
||||
import { TabsState } from "../../types/tabs";
|
||||
import { validateNodes } from "../../utils/reactflowUtils";
|
||||
|
|
@ -60,6 +61,7 @@ export default function FormModal({
|
|||
|
||||
const [chatHistory, setChatHistory] = useState<ChatMessageType[]>([]);
|
||||
const { reactFlowInstance } = useContext(typesContext);
|
||||
const { accessToken } = useContext(AuthContext);
|
||||
const { setErrorData } = useContext(alertContext);
|
||||
const ws = useRef<WebSocket | null>(null);
|
||||
const [lockChat, setLockChat] = useState(false);
|
||||
|
|
@ -160,7 +162,7 @@ export default function FormModal({
|
|||
}, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
//TODO improve check of user authentication
|
||||
function getWebSocketUrl(
|
||||
chatId: string,
|
||||
isDevelopment: boolean = false
|
||||
|
|
@ -173,7 +175,7 @@ export default function FormModal({
|
|||
|
||||
return `${
|
||||
isDevelopment ? "ws" : webSocketProtocol
|
||||
}://${host}${chatEndpoint}`;
|
||||
}://${host}${chatEndpoint}?token=${accessToken}`;
|
||||
}
|
||||
|
||||
function handleWsMessage(data: any) {
|
||||
|
|
|
|||
|
|
@ -147,7 +147,6 @@ export default function GenericModal({
|
|||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
console.log(error);
|
||||
setIsEdit(true);
|
||||
return setErrorData({
|
||||
title: "There is something wrong with this prompt, please review it",
|
||||
|
|
|
|||
|
|
@ -1,12 +1,62 @@
|
|||
import { useContext, useState } from "react";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { Button } from "../../../components/ui/button";
|
||||
import { Input } from "../../../components/ui/input";
|
||||
import { CONTROL_LOGIN_STATE } from "../../../constants/constants";
|
||||
import { alertContext } from "../../../contexts/alertContext";
|
||||
import { AuthContext } from "../../../contexts/authContext";
|
||||
import { getLoggedUser, onLogin } from "../../../controllers/API";
|
||||
import { LoginType } from "../../../types/api";
|
||||
import {
|
||||
inputHandlerEventType,
|
||||
loginInputStateType,
|
||||
} from "../../../types/components";
|
||||
|
||||
export default function LoginAdminPage() {
|
||||
const navigate = useNavigate();
|
||||
|
||||
function loginAdmin() {
|
||||
navigate("/admin/");
|
||||
const [inputState, setInputState] =
|
||||
useState<loginInputStateType>(CONTROL_LOGIN_STATE);
|
||||
const { login, getAuthentication, setUserData } = useContext(AuthContext);
|
||||
|
||||
const { password, username } = inputState;
|
||||
const { setErrorData } = useContext(alertContext);
|
||||
|
||||
function handleInput({
|
||||
target: { name, value },
|
||||
}: inputHandlerEventType): void {
|
||||
setInputState((prev) => ({ ...prev, [name]: value }));
|
||||
}
|
||||
|
||||
function signIn() {
|
||||
const user: LoginType = {
|
||||
username: username,
|
||||
password: password,
|
||||
};
|
||||
onLogin(user)
|
||||
.then((user) => {
|
||||
login(user.access_token, user.refresh_token);
|
||||
getUser();
|
||||
navigate("/admin/");
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error signing in",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function getUser() {
|
||||
if (getAuthentication) {
|
||||
setTimeout(() => {
|
||||
getLoggedUser()
|
||||
.then((user) => {
|
||||
setUserData(user);
|
||||
})
|
||||
.catch((error) => {});
|
||||
}, 1000);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
|
|
@ -14,11 +64,24 @@ export default function LoginAdminPage() {
|
|||
<div className="flex w-72 flex-col items-center justify-center gap-2">
|
||||
<span className="mb-4 text-5xl">⛓️</span>
|
||||
<span className="mb-6 text-2xl font-semibold text-primary">Admin</span>
|
||||
<Input className="bg-background" placeholder="Email address" />
|
||||
<Input className="bg-background" placeholder="Password" />
|
||||
<Input
|
||||
onChange={({ target: { value } }) => {
|
||||
handleInput({ target: { name: "username", value } });
|
||||
}}
|
||||
className="bg-background"
|
||||
placeholder="Username"
|
||||
/>
|
||||
<Input
|
||||
type="password"
|
||||
onChange={({ target: { value } }) => {
|
||||
handleInput({ target: { name: "password", value } });
|
||||
}}
|
||||
className="bg-background"
|
||||
placeholder="Password"
|
||||
/>
|
||||
<Button
|
||||
onClick={() => {
|
||||
loginAdmin();
|
||||
signIn();
|
||||
}}
|
||||
variant="default"
|
||||
className="w-full"
|
||||
|
|
|
|||
|
|
@ -1,10 +1,11 @@
|
|||
import _ from "lodash";
|
||||
import { cloneDeep } from "lodash";
|
||||
import { X } from "lucide-react";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { useContext, useEffect, useRef, useState } from "react";
|
||||
import PaginatorComponent from "../../components/PaginatorComponent";
|
||||
import ShadTooltip from "../../components/ShadTooltipComponent";
|
||||
import IconComponent from "../../components/genericIconComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { Checkbox } from "../../components/ui/checkbox";
|
||||
import { Input } from "../../components/ui/input";
|
||||
import {
|
||||
Table,
|
||||
|
|
@ -14,265 +15,202 @@ import {
|
|||
TableHeader,
|
||||
TableRow,
|
||||
} from "../../components/ui/table";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
import {
|
||||
addUser,
|
||||
deleteUser,
|
||||
getUsersPage,
|
||||
updateUser,
|
||||
} from "../../controllers/API";
|
||||
import ConfirmationModal from "../../modals/ConfirmationModal";
|
||||
import UserManagementModal from "../../modals/UserManagementModal";
|
||||
import { UserInputType } from "../../types/components";
|
||||
import Header from "../../components/headerComponent";
|
||||
import { Users } from "../../types/api";
|
||||
|
||||
export default function AdminPage() {
|
||||
const [inputValue, setInputValue] = useState("");
|
||||
|
||||
const [size, setPageSize] = useState(10);
|
||||
const [index, setPageIndex] = useState(1);
|
||||
const [index, setPageIndex] = useState(0);
|
||||
const [loadingUsers, setLoadingUsers] = useState(true);
|
||||
const { setErrorData, setSuccessData } = useContext(alertContext);
|
||||
const { userData } = useContext(AuthContext);
|
||||
const [totalRowsCount, setTotalRowsCount] = useState(0);
|
||||
|
||||
const userList = useRef([
|
||||
{
|
||||
user: generateRandomString(50),
|
||||
email: generateRandomString(50) + "@example.com",
|
||||
password: generateRandomString(50),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
{
|
||||
user: generateRandomString(8),
|
||||
email: generateRandomString(10) + "@example.com",
|
||||
password: generateRandomString(12),
|
||||
register_date: generateRandomDate(),
|
||||
},
|
||||
]);
|
||||
const userList = useRef([]);
|
||||
|
||||
useEffect(() => {
|
||||
setTimeout(() => {
|
||||
getUsers();
|
||||
}, 500);
|
||||
}, []);
|
||||
|
||||
const [filterUserList, setFilterUserList] = useState(userList.current);
|
||||
|
||||
function generateRandomString(length) {
|
||||
const characters =
|
||||
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
|
||||
let result = "";
|
||||
for (let i = 0; i < length; i++) {
|
||||
const randomIndex = Math.floor(Math.random() * characters.length);
|
||||
result += characters.charAt(randomIndex);
|
||||
}
|
||||
return result;
|
||||
function getUsers() {
|
||||
setLoadingUsers(true);
|
||||
getUsersPage(index, size)
|
||||
.then((users) => {
|
||||
setTotalRowsCount(users["total_count"]);
|
||||
userList.current = users["users"];
|
||||
setFilterUserList(users["users"]);
|
||||
setLoadingUsers(false);
|
||||
})
|
||||
.catch((error) => {
|
||||
setLoadingUsers(false);
|
||||
});
|
||||
}
|
||||
|
||||
function generateRandomDate() {
|
||||
const start = new Date(2010, 0, 1);
|
||||
const end = new Date();
|
||||
const randomTimestamp =
|
||||
start.getTime() + Math.random() * (end.getTime() - start.getTime());
|
||||
const randomDate = new Date(randomTimestamp);
|
||||
|
||||
const options = { year: "numeric", month: "short", day: "numeric" };
|
||||
return randomDate.toLocaleDateString("en-US");
|
||||
}
|
||||
|
||||
const [editUser, setEditUser] = useState(-1);
|
||||
const [editedUser, setEditedUser] = useState("");
|
||||
const [modalEditOpen, setModalEditOpen] = useState(false);
|
||||
const [modalDeleteOpen, setModalDeleteOpen] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
resetFilter();
|
||||
}, []);
|
||||
|
||||
const handleInputChange = (event, index) => {
|
||||
const user = _.cloneDeepWith(userList.current);
|
||||
user[index].password = event.target.value;
|
||||
userList.current = user;
|
||||
|
||||
const userFilter = _.cloneDeepWith(filterUserList);
|
||||
userFilter[index].password = event.target.value;
|
||||
setFilterUserList(userFilter);
|
||||
|
||||
setEditedUser(event.target.value);
|
||||
};
|
||||
|
||||
function handleChangePagination(pageIndex: number, pageSize: number) {
|
||||
setPageIndex(pageIndex);
|
||||
setPageSize(pageSize);
|
||||
|
||||
const startIndex = (pageIndex - 1) * pageSize;
|
||||
const endIndex = startIndex + pageSize;
|
||||
const newList = userList.current.slice(startIndex, endIndex);
|
||||
|
||||
setFilterUserList(newList);
|
||||
setLoadingUsers(true);
|
||||
getUsersPage(pageIndex, pageSize)
|
||||
.then((users) => {
|
||||
setTotalRowsCount(users["total_count"]);
|
||||
userList.current = users["users"];
|
||||
setFilterUserList(users["users"]);
|
||||
setLoadingUsers(false);
|
||||
})
|
||||
.catch((error) => {
|
||||
setLoadingUsers(false);
|
||||
});
|
||||
}
|
||||
|
||||
function resetFilter() {
|
||||
setFilterUserList(userList.current);
|
||||
setPageIndex(1);
|
||||
setPageIndex(0);
|
||||
setPageSize(10);
|
||||
|
||||
const startIndex = (index - 1) * size;
|
||||
const endIndex = index + size - 1;
|
||||
const newList = userList.current.slice(startIndex, endIndex);
|
||||
|
||||
console.log(userList.current);
|
||||
|
||||
setFilterUserList(newList);
|
||||
getUsers();
|
||||
}
|
||||
|
||||
function handleFilterUsers(input: string) {
|
||||
setInputValue(input);
|
||||
|
||||
if (input === "") {
|
||||
resetFilter();
|
||||
setFilterUserList(userList.current);
|
||||
} else {
|
||||
const filteredList = userList.current.filter(
|
||||
(user) =>
|
||||
user.user.toLowerCase().includes(input.toLowerCase()) ||
|
||||
user.email.toLowerCase().includes(input.toLowerCase())
|
||||
const filteredList = userList.current.filter((user:Users) =>
|
||||
user.username.toLowerCase().includes(input.toLowerCase())
|
||||
);
|
||||
setFilterUserList(filteredList);
|
||||
}
|
||||
}
|
||||
|
||||
function handleDeleteUser(index) {
|
||||
const user = _.cloneDeepWith(userList.current);
|
||||
user.splice(index, 1);
|
||||
userList.current = user;
|
||||
|
||||
const userFilter = _.cloneDeepWith(filterUserList);
|
||||
userFilter.splice(index, 1);
|
||||
setFilterUserList(userFilter);
|
||||
|
||||
resetFilter();
|
||||
function handleDeleteUser(user) {
|
||||
deleteUser(user.id)
|
||||
.then((res) => {
|
||||
resetFilter();
|
||||
setSuccessData({
|
||||
title: "Success! User deleted!",
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error on delete user",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function handleEditUser(index, user) {
|
||||
const newUser = _.cloneDeepWith(userList.current);
|
||||
newUser[index].password = user.password;
|
||||
newUser[index].user = user.username;
|
||||
userList.current = newUser;
|
||||
resetFilter();
|
||||
function handleEditUser(userId, user) {
|
||||
updateUser(userId, user)
|
||||
.then((res) => {
|
||||
resetFilter();
|
||||
setSuccessData({
|
||||
title: "Success! User edited!",
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error on edit user",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function handleNewUser(user) {
|
||||
const newUser = {
|
||||
user: user.username,
|
||||
email: generateRandomString(50) + "@example.com",
|
||||
password: user.password,
|
||||
register_date: generateRandomDate(),
|
||||
};
|
||||
function handleDisableUser(check, userId, user) {
|
||||
const userEdit = cloneDeep(user);
|
||||
userEdit.is_active = !check;
|
||||
|
||||
userList.current.unshift(newUser);
|
||||
console.log(userList.current);
|
||||
updateUser(userId, userEdit)
|
||||
.then((res) => {
|
||||
resetFilter();
|
||||
setSuccessData({
|
||||
title: "Success! User edited!",
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error on edit user",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
resetFilter();
|
||||
function handleSuperUserEdit(check, userId, user) {
|
||||
const userEdit = cloneDeep(user);
|
||||
userEdit.is_superuser = !check;
|
||||
updateUser(userId, userEdit)
|
||||
.then((res) => {
|
||||
resetFilter();
|
||||
setSuccessData({
|
||||
title: "Success! User edited!",
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error on edit user",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function handleNewUser(user: UserInputType) {
|
||||
addUser(user)
|
||||
.then((res) => {
|
||||
resetFilter();
|
||||
setSuccessData({
|
||||
title: "Success! New user added!",
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error on add new user",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="main-page-panel">
|
||||
<div className="m-auto flex h-full flex-row justify-center">
|
||||
<div className="basis-5/6">
|
||||
<div className="m-auto flex h-full flex-col space-y-8 p-8 ">
|
||||
<div className="flex items-center justify-between space-y-2">
|
||||
<div>
|
||||
<h2 className="text-2xl font-bold tracking-tight">
|
||||
Welcome back!
|
||||
</h2>
|
||||
<p className="text-muted-foreground">
|
||||
Here's a list of all users!
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center space-x-2"></div>
|
||||
</div>
|
||||
|
||||
{userList.current.length === 0 && (
|
||||
<>
|
||||
<div className="flex items-center justify-between">
|
||||
<h2>There's no users left :)</h2>
|
||||
<div className="flex flex-col">
|
||||
<Header />
|
||||
{userData && (
|
||||
<div className="main-page-panel">
|
||||
<div className="m-auto flex h-full flex-row justify-center">
|
||||
<div className="basis-5/6">
|
||||
<div className="m-auto flex h-full flex-col space-y-8 p-8 ">
|
||||
<div className="flex items-center justify-between space-y-2">
|
||||
<div>
|
||||
<h2 className="text-2xl font-bold tracking-tight">
|
||||
Welcome back!
|
||||
</h2>
|
||||
<p className="text-muted-foreground">
|
||||
Navigate through this section to efficiently oversee all
|
||||
application users. From here, you can seamlessly manage
|
||||
user accounts.
|
||||
</p>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
{userList.current.length > 0 && (
|
||||
<div className="flex items-center space-x-2"></div>
|
||||
</div>
|
||||
|
||||
{userList.current.length === 0 && !loadingUsers && (
|
||||
<>
|
||||
<div className="flex items-center justify-between">
|
||||
<h2>There's no users registered :)</h2>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
<>
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex flex-1 items-center space-x-2">
|
||||
|
|
@ -285,8 +223,8 @@ export default function AdminPage() {
|
|||
{inputValue.length > 0 && (
|
||||
<Button
|
||||
onClick={() => {
|
||||
resetFilter();
|
||||
setInputValue("");
|
||||
setFilterUserList(userList.current);
|
||||
}}
|
||||
variant="ghost"
|
||||
className="h-8 px-2 lg:px-3"
|
||||
|
|
@ -311,90 +249,183 @@ export default function AdminPage() {
|
|||
</UserManagementModal>
|
||||
</div>
|
||||
</div>
|
||||
{loadingUsers && (
|
||||
<div>
|
||||
<strong>Loading...</strong>
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className="overflow-scroll overflow-x-hidden rounded-md border-2 bg-muted
|
||||
custom-scroll"
|
||||
className={
|
||||
"max-h-[26rem] min-h-[26rem] overflow-scroll overflow-x-hidden rounded-md border-2 bg-muted custom-scroll" +
|
||||
(loadingUsers ? " border-0" : "")
|
||||
}
|
||||
>
|
||||
<Table className="table-fixed bg-muted outline-1 ">
|
||||
<TableHeader>
|
||||
<Table className={"table-fixed bg-muted outline-1"}>
|
||||
<TableHeader
|
||||
className={
|
||||
loadingUsers
|
||||
? "hidden"
|
||||
: "table-fixed bg-muted outline-1"
|
||||
}
|
||||
>
|
||||
<TableRow>
|
||||
<TableHead className="h-10">User</TableHead>
|
||||
<TableHead className="h-10">Password</TableHead>
|
||||
<TableHead className="h-10">Id</TableHead>
|
||||
<TableHead className="h-10">Username</TableHead>
|
||||
<TableHead className="h-10">Active</TableHead>
|
||||
<TableHead className="h-10">Superuser</TableHead>
|
||||
<TableHead className="h-10">Created At</TableHead>
|
||||
<TableHead className="h-10">Updated At</TableHead>
|
||||
<TableHead className="h-10 w-[100px] text-right"></TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{filterUserList.map((user, index) => (
|
||||
<TableRow key={user.user}>
|
||||
<TableCell className="truncate py-2 font-medium">
|
||||
{user.user}
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2">
|
||||
{user.password}
|
||||
</TableCell>
|
||||
<TableCell className="flex w-[100px] py-2 text-right">
|
||||
<div className="flex">
|
||||
<UserManagementModal
|
||||
title="Edit"
|
||||
titleHeader={`${user.user}`}
|
||||
cancelText="Cancel"
|
||||
confirmationText="Edit"
|
||||
icon={"UserPlus2"}
|
||||
data={user}
|
||||
index={index}
|
||||
onConfirm={(index, user) => {
|
||||
handleEditUser(index, user);
|
||||
}}
|
||||
>
|
||||
<ShadTooltip content="Edit" side="top">
|
||||
<IconComponent
|
||||
name="Pencil"
|
||||
className="h-4 w-4 cursor-pointer"
|
||||
/>
|
||||
</ShadTooltip>
|
||||
</UserManagementModal>
|
||||
|
||||
{!loadingUsers && (
|
||||
<TableBody>
|
||||
{filterUserList.map((user:UserInputType, index) => (
|
||||
<TableRow key={index}>
|
||||
<TableCell className="truncate py-2 font-medium">
|
||||
<ShadTooltip content={user.id}>
|
||||
<span className="cursor-default">
|
||||
{user.id}
|
||||
</span>
|
||||
</ShadTooltip>
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2">
|
||||
<ShadTooltip content={user.username}>
|
||||
<span className="cursor-default">
|
||||
{user.username}
|
||||
</span>
|
||||
</ShadTooltip>
|
||||
</TableCell>
|
||||
<TableCell className="relative left-5 truncate py-2 text-align-last-left">
|
||||
<ConfirmationModal
|
||||
title="Delete"
|
||||
titleHeader="Delete User"
|
||||
title="Edit"
|
||||
titleHeader={`${user.username}`}
|
||||
modalContentTitle="Attention!"
|
||||
modalContent="Are you sure you want to delete this user? This action cannot be undone."
|
||||
modalContent="Are you completely confident about the changes you are making to this user?"
|
||||
cancelText="Cancel"
|
||||
confirmationText="Delete"
|
||||
icon={"UserMinus2"}
|
||||
confirmationText="Confirm"
|
||||
icon={"UserCog2"}
|
||||
data={user}
|
||||
index={index}
|
||||
onConfirm={(index, user) => {
|
||||
handleDeleteUser(index);
|
||||
handleDisableUser(
|
||||
user.is_active,
|
||||
user.id,
|
||||
user
|
||||
);
|
||||
}}
|
||||
>
|
||||
<ShadTooltip content="Delete" side="top">
|
||||
<IconComponent
|
||||
name="Trash2"
|
||||
className="ml-2 h-4 w-4 cursor-pointer"
|
||||
/>
|
||||
</ShadTooltip>
|
||||
<Checkbox
|
||||
id="is_active"
|
||||
checked={user.is_active}
|
||||
/>
|
||||
</ConfirmationModal>
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</TableCell>
|
||||
<TableCell className="relative left-5 truncate py-2 text-align-last-left">
|
||||
<ConfirmationModal
|
||||
title="Edit"
|
||||
titleHeader={`${user.username}`}
|
||||
modalContentTitle="Attention!"
|
||||
modalContent="Are you completely confident about the changes you are making to this user?"
|
||||
cancelText="Cancel"
|
||||
confirmationText="Confirm"
|
||||
icon={"UserCog2"}
|
||||
data={user}
|
||||
index={index}
|
||||
onConfirm={(index, user) => {
|
||||
handleSuperUserEdit(
|
||||
user.is_superuser,
|
||||
user.id,
|
||||
user
|
||||
);
|
||||
}}
|
||||
>
|
||||
<Checkbox
|
||||
id="is_superuser"
|
||||
checked={user.is_superuser}
|
||||
/>
|
||||
</ConfirmationModal>
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2 ">
|
||||
{
|
||||
new Date(user.create_at!)
|
||||
.toISOString()
|
||||
.split("T")[0]
|
||||
}
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2">
|
||||
{
|
||||
new Date(user.updated_at!)
|
||||
.toISOString()
|
||||
.split("T")[0]
|
||||
}
|
||||
</TableCell>
|
||||
<TableCell className="flex w-[100px] py-2 text-right">
|
||||
<div className="flex">
|
||||
<UserManagementModal
|
||||
title="Edit"
|
||||
titleHeader={`${user.id}`}
|
||||
cancelText="Cancel"
|
||||
confirmationText="Save"
|
||||
icon={"UserPlus2"}
|
||||
data={user}
|
||||
index={index}
|
||||
onConfirm={(index, editUser) => {
|
||||
handleEditUser(user.id, editUser);
|
||||
}}
|
||||
>
|
||||
<ShadTooltip content="Edit" side="top">
|
||||
<IconComponent
|
||||
name="Pencil"
|
||||
className="h-4 w-4 cursor-pointer"
|
||||
/>
|
||||
</ShadTooltip>
|
||||
</UserManagementModal>
|
||||
|
||||
<ConfirmationModal
|
||||
title="Delete"
|
||||
titleHeader="Delete User"
|
||||
modalContentTitle="Attention!"
|
||||
modalContent="Are you sure you want to delete this user? This action cannot be undone."
|
||||
cancelText="Cancel"
|
||||
confirmationText="Delete"
|
||||
icon={"UserMinus2"}
|
||||
data={user}
|
||||
index={index}
|
||||
onConfirm={(index, user) => {
|
||||
handleDeleteUser(user);
|
||||
}}
|
||||
>
|
||||
<ShadTooltip content="Delete" side="top">
|
||||
<IconComponent
|
||||
name="Trash2"
|
||||
className="ml-2 h-4 w-4 cursor-pointer"
|
||||
/>
|
||||
</ShadTooltip>
|
||||
</ConfirmationModal>
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
)}
|
||||
</Table>
|
||||
</div>
|
||||
|
||||
<PaginatorComponent
|
||||
pageIndex={index}
|
||||
pageSize={size}
|
||||
totalRowsCount={filterUserList.length}
|
||||
totalRowsCount={totalRowsCount}
|
||||
paginate={(pageIndex, pageSize) => {
|
||||
handleChangePagination(pageSize, pageIndex);
|
||||
}}
|
||||
></PaginatorComponent>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
|
|
|||
283
src/frontend/src/pages/ApiKeysPage/index.tsx
Normal file
283
src/frontend/src/pages/ApiKeysPage/index.tsx
Normal file
|
|
@ -0,0 +1,283 @@
|
|||
import { useContext, useEffect, useRef, useState } from "react";
|
||||
import ShadTooltip from "../../components/ShadTooltipComponent";
|
||||
import IconComponent from "../../components/genericIconComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "../../components/ui/table";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
import { deleteApiKey, getApiKey } from "../../controllers/API";
|
||||
import ConfirmationModal from "../../modals/ConfirmationModal";
|
||||
import SecretKeyModal from "../../modals/SecretKeyModal";
|
||||
|
||||
import moment from "moment";
|
||||
import Header from "../../components/headerComponent";
|
||||
import {
|
||||
API_PAGE_PARAGRAPH_1,
|
||||
API_PAGE_PARAGRAPH_2,
|
||||
API_PAGE_USER_KEYS,
|
||||
LAST_USED_SPAN_1,
|
||||
LAST_USED_SPAN_2,
|
||||
} from "../../constants/constants";
|
||||
import { ApiKey } from "../../types/components";
|
||||
|
||||
export default function ApiKeysPage() {
|
||||
const [loadingKeys, setLoadingKeys] = useState(true);
|
||||
const { setErrorData, setSuccessData } = useContext(alertContext);
|
||||
const { userData } = useContext(AuthContext);
|
||||
const [userId, setUserId] = useState("");
|
||||
const keysList = useRef([]);
|
||||
|
||||
useEffect(() => {
|
||||
getKeys();
|
||||
}, [userData]);
|
||||
|
||||
function getKeys() {
|
||||
setLoadingKeys(true);
|
||||
if (userData) {
|
||||
getApiKey()
|
||||
.then((keys: [ApiKey]) => {
|
||||
keysList.current = keys["api_keys"];
|
||||
setUserId(keys["user_id"]);
|
||||
setLoadingKeys(false);
|
||||
})
|
||||
.catch((error) => {
|
||||
setLoadingKeys(false);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function resetFilter() {
|
||||
getKeys();
|
||||
}
|
||||
|
||||
function handleDeleteKey(keys) {
|
||||
deleteApiKey(keys)
|
||||
.then((res) => {
|
||||
resetFilter();
|
||||
setSuccessData({
|
||||
title: "Success! Key deleted!",
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error on delete key",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function lastUsedMessage() {
|
||||
return (
|
||||
<div className="text-xs">
|
||||
<span>
|
||||
{LAST_USED_SPAN_1}
|
||||
<br></br> {LAST_USED_SPAN_2}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<Header></Header>
|
||||
{userData && (
|
||||
<div className="main-page-panel">
|
||||
<div className="m-auto flex h-full flex-row justify-center">
|
||||
<div className="basis-5/6">
|
||||
<div className="m-auto flex h-full flex-col space-y-8 p-8 ">
|
||||
<div className="flex items-center justify-between space-y-2">
|
||||
<div>
|
||||
<h2 className="text-2xl font-bold tracking-tight">
|
||||
API keys
|
||||
</h2>
|
||||
<p className="text-muted-foreground">
|
||||
{API_PAGE_PARAGRAPH_1}
|
||||
<br />
|
||||
{API_PAGE_PARAGRAPH_2}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex items-center space-x-2"></div>
|
||||
</div>
|
||||
|
||||
{keysList.current &&
|
||||
keysList.current.length === 0 &&
|
||||
!loadingKeys && (
|
||||
<>
|
||||
<div className="flex items-center justify-between">
|
||||
<h2>{API_PAGE_USER_KEYS}</h2>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
<>
|
||||
{loadingKeys && (
|
||||
<div>
|
||||
<strong>Loading...</strong>
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className={
|
||||
"max-h-[15rem] overflow-scroll overflow-x-hidden rounded-md border-2 bg-muted custom-scroll" +
|
||||
(loadingKeys ? " border-0" : "")
|
||||
}
|
||||
>
|
||||
{keysList.current &&
|
||||
keysList.current.length > 0 &&
|
||||
!loadingKeys && (
|
||||
<Table className={"table-fixed bg-muted outline-1"}>
|
||||
<TableHeader
|
||||
className={
|
||||
loadingKeys
|
||||
? "hidden"
|
||||
: "table-fixed bg-muted outline-1"
|
||||
}
|
||||
>
|
||||
<TableRow>
|
||||
<TableHead className="h-10">Name</TableHead>
|
||||
<TableHead className="h-10">Key</TableHead>
|
||||
<TableHead className="h-10">Created</TableHead>
|
||||
<TableHead className="flex h-10 items-center">
|
||||
Last Used
|
||||
<ShadTooltip
|
||||
side="top"
|
||||
content={lastUsedMessage()}
|
||||
>
|
||||
<div>
|
||||
<IconComponent
|
||||
name="Info"
|
||||
className="ml-1 h-3 w-3"
|
||||
/>
|
||||
</div>
|
||||
</ShadTooltip>
|
||||
</TableHead>
|
||||
<TableHead className="h-10">Total Uses</TableHead>
|
||||
<TableHead className="h-10 w-[100px] text-right"></TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
{!loadingKeys && (
|
||||
<TableBody>
|
||||
{keysList.current.map(
|
||||
(api_keys: ApiKey, index: number) => (
|
||||
<TableRow key={index}>
|
||||
<TableCell className="truncate py-2">
|
||||
<ShadTooltip content={api_keys.name}>
|
||||
<span className="cursor-default">
|
||||
{api_keys.name ? api_keys.name : "-"}
|
||||
</span>
|
||||
</ShadTooltip>
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2">
|
||||
<span className="cursor-default">
|
||||
{api_keys.api_key}
|
||||
</span>
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2 ">
|
||||
<ShadTooltip
|
||||
side="top"
|
||||
content={moment(
|
||||
api_keys.created_at
|
||||
).format("YYYY-MM-DD HH:mm")}
|
||||
>
|
||||
<div>
|
||||
{moment(api_keys.created_at).format(
|
||||
"YYYY-MM-DD HH:mm"
|
||||
)}
|
||||
</div>
|
||||
</ShadTooltip>
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2">
|
||||
<ShadTooltip
|
||||
side="top"
|
||||
content={
|
||||
moment(api_keys.last_used_at).format(
|
||||
"YYYY-MM-DD HH:mm"
|
||||
) === "Invalid date"
|
||||
? "Never"
|
||||
: moment(
|
||||
api_keys.last_used_at
|
||||
).format("YYYY-MM-DD HH:mm")
|
||||
}
|
||||
>
|
||||
<div>
|
||||
{moment(api_keys.last_used_at).format(
|
||||
"YYYY-MM-DD HH:mm"
|
||||
) === "Invalid date"
|
||||
? "Never"
|
||||
: moment(
|
||||
api_keys.last_used_at
|
||||
).format("YYYY-MM-DD HH:mm")}
|
||||
</div>
|
||||
</ShadTooltip>
|
||||
</TableCell>
|
||||
<TableCell className="truncate py-2">
|
||||
{api_keys.total_uses}
|
||||
</TableCell>
|
||||
<TableCell className="flex w-[100px] py-2 text-right">
|
||||
<div className="flex">
|
||||
<ConfirmationModal
|
||||
title="Delete"
|
||||
titleHeader="Delete User"
|
||||
modalContentTitle="Attention!"
|
||||
modalContent="Are you sure you want to delete this key? This action cannot be undone."
|
||||
cancelText="Cancel"
|
||||
confirmationText="Delete"
|
||||
icon={"UserMinus2"}
|
||||
data={api_keys.id}
|
||||
index={index}
|
||||
onConfirm={(index, keys) => {
|
||||
handleDeleteKey(keys);
|
||||
}}
|
||||
>
|
||||
<ShadTooltip
|
||||
content="Delete"
|
||||
side="top"
|
||||
>
|
||||
<IconComponent
|
||||
name="Trash2"
|
||||
className="ml-2 h-4 w-4 cursor-pointer"
|
||||
/>
|
||||
</ShadTooltip>
|
||||
</ConfirmationModal>
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
)
|
||||
)}
|
||||
</TableBody>
|
||||
)}
|
||||
</Table>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<SecretKeyModal
|
||||
title="Create new secret key"
|
||||
cancelText="Cancel"
|
||||
confirmationText="Create secret key"
|
||||
icon={"Key"}
|
||||
data={userId}
|
||||
onCloseModal={getKeys}
|
||||
>
|
||||
<Button>
|
||||
<IconComponent name="Plus" className="mr-1 h-5 w-5" />
|
||||
Create new secret key
|
||||
</Button>
|
||||
</SecretKeyModal>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
|
@ -125,7 +125,6 @@ export default function ExtraSidebar(): JSX.Element {
|
|||
}
|
||||
onClick={(event) => {
|
||||
saveFlow(flow!);
|
||||
setSuccessData({ title: "Changes saved successfully" });
|
||||
}}
|
||||
>
|
||||
<IconComponent
|
||||
|
|
|
|||
|
|
@ -1,10 +1,14 @@
|
|||
import * as Form from "@radix-ui/react-form";
|
||||
import { useState } from "react";
|
||||
import { Link } from "react-router-dom";
|
||||
import { useContext, useState } from "react";
|
||||
import { Link, useNavigate } from "react-router-dom";
|
||||
import InputComponent from "../../components/inputComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { Input } from "../../components/ui/input";
|
||||
import { CONTROL_LOGIN_STATE } from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { AuthContext } from "../../contexts/authContext";
|
||||
import { getLoggedUser, onLogin } from "../../controllers/API";
|
||||
import { LoginType } from "../../types/api";
|
||||
import {
|
||||
inputHandlerEventType,
|
||||
loginInputStateType,
|
||||
|
|
@ -15,12 +19,49 @@ export default function LoginPage(): JSX.Element {
|
|||
useState<loginInputStateType>(CONTROL_LOGIN_STATE);
|
||||
|
||||
const { password, username } = inputState;
|
||||
const { login, getAuthentication, setUserData, setIsAdmin } = useContext(AuthContext);
|
||||
const navigate = useNavigate();
|
||||
const { setErrorData } = useContext(alertContext);
|
||||
|
||||
function handleInput({
|
||||
target: { name, value },
|
||||
}: inputHandlerEventType): void {
|
||||
setInputState((prev) => ({ ...prev, [name]: value }));
|
||||
}
|
||||
|
||||
function signIn() {
|
||||
const user: LoginType = {
|
||||
username: username,
|
||||
password: password,
|
||||
};
|
||||
onLogin(user)
|
||||
.then((user) => {
|
||||
login(user.access_token, user.refresh_token);
|
||||
getUser();
|
||||
navigate("/");
|
||||
})
|
||||
.catch((error) => {
|
||||
setErrorData({
|
||||
title: "Error signing in",
|
||||
list: [error["response"]["data"]["detail"]],
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function getUser() {
|
||||
if (getAuthentication()) {
|
||||
setTimeout(() => {
|
||||
getLoggedUser()
|
||||
.then((user) => {
|
||||
const isSuperUser = user.is_superuser;
|
||||
setIsAdmin(isSuperUser);
|
||||
setUserData(user);
|
||||
})
|
||||
.catch((error) => {});
|
||||
}, 500);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Form.Root
|
||||
onSubmit={(event) => {
|
||||
|
|
@ -28,7 +69,7 @@ export default function LoginPage(): JSX.Element {
|
|||
event.preventDefault();
|
||||
return;
|
||||
}
|
||||
|
||||
signIn();
|
||||
const data = Object.fromEntries(new FormData(event.currentTarget));
|
||||
event.preventDefault();
|
||||
}}
|
||||
|
|
@ -92,7 +133,7 @@ export default function LoginPage(): JSX.Element {
|
|||
</Form.Submit>
|
||||
</div>
|
||||
<div className="w-full">
|
||||
<Link to="">
|
||||
<Link to="/signup">
|
||||
<Button className="w-full" variant="outline">
|
||||
Don't have an account? <b>Sign Up</b>
|
||||
</Button>
|
||||
|
|
|
|||
|
|
@ -1,11 +1,17 @@
|
|||
import * as Form from "@radix-ui/react-form";
|
||||
import { FormEvent, useState } from "react";
|
||||
import { Link } from "react-router-dom";
|
||||
import { FormEvent, useContext, useState } from "react";
|
||||
import { Link, useNavigate } from "react-router-dom";
|
||||
import InputComponent from "../../components/inputComponent";
|
||||
import { Button } from "../../components/ui/button";
|
||||
import { Input } from "../../components/ui/input";
|
||||
import { CONTROL_INPUT_STATE } from "../../constants/constants";
|
||||
import {
|
||||
CONTROL_INPUT_STATE,
|
||||
SIGN_UP_SUCCESS,
|
||||
} from "../../constants/constants";
|
||||
import { alertContext } from "../../contexts/alertContext";
|
||||
import { addUser } from "../../controllers/API";
|
||||
import {
|
||||
UserInputType,
|
||||
inputHandlerEventType,
|
||||
signUpInputStateType,
|
||||
} from "../../types/components";
|
||||
|
|
@ -15,12 +21,42 @@ export default function SignUp(): JSX.Element {
|
|||
useState<signUpInputStateType>(CONTROL_INPUT_STATE);
|
||||
|
||||
const { password, cnfPassword, username } = inputState;
|
||||
const { setErrorData, setSuccessData } = useContext(alertContext);
|
||||
const navigate = useNavigate();
|
||||
|
||||
function handleInput({
|
||||
target: { name, value },
|
||||
}: inputHandlerEventType): void {
|
||||
setInputState((prev) => ({ ...prev, [name]: value }));
|
||||
}
|
||||
|
||||
function handleSignup(): void {
|
||||
const { username, password } = inputState;
|
||||
const newUser: UserInputType = {
|
||||
username,
|
||||
password,
|
||||
};
|
||||
addUser(newUser)
|
||||
.then((user) => {
|
||||
setSuccessData({
|
||||
title: SIGN_UP_SUCCESS,
|
||||
});
|
||||
navigate("/login");
|
||||
})
|
||||
.catch((error) => {
|
||||
const {
|
||||
response: {
|
||||
data: { detail },
|
||||
},
|
||||
} = error;
|
||||
setErrorData({
|
||||
title: "Error signing up",
|
||||
list: [detail],
|
||||
});
|
||||
return;
|
||||
});
|
||||
}
|
||||
|
||||
return (
|
||||
<Form.Root
|
||||
onSubmit={(event: FormEvent<HTMLFormElement>) => {
|
||||
|
|
@ -120,7 +156,14 @@ export default function SignUp(): JSX.Element {
|
|||
</div>
|
||||
<div className="w-full">
|
||||
<Form.Submit asChild>
|
||||
<Button className="mr-3 mt-6 w-full">Sign up</Button>
|
||||
<Button
|
||||
className="mr-3 mt-6 w-full"
|
||||
onClick={() => {
|
||||
handleSignup();
|
||||
}}
|
||||
>
|
||||
Sign up
|
||||
</Button>
|
||||
</Form.Submit>
|
||||
</div>
|
||||
<div className="w-full">
|
||||
|
|
|
|||
|
|
@ -1,32 +1,116 @@
|
|||
import { Route, Routes } from "react-router-dom";
|
||||
import { ProtectedAdminRoute } from "./components/authAdminGuard";
|
||||
import { ProtectedRoute } from "./components/authGuard";
|
||||
import { ProtectedLoginRoute } from "./components/authLoginGuard";
|
||||
import { CatchAllRoute } from "./components/catchAllRoutes";
|
||||
import AdminPage from "./pages/AdminPage";
|
||||
import LoginAdminPage from "./pages/AdminPage/LoginPage";
|
||||
import ApiKeysPage from "./pages/ApiKeysPage";
|
||||
import CommunityPage from "./pages/CommunityPage";
|
||||
import FlowPage from "./pages/FlowPage";
|
||||
import HomePage from "./pages/MainPage";
|
||||
import ViewPage from "./pages/ViewPage";
|
||||
import DeleteAccountPage from "./pages/deleteAccountPage";
|
||||
import LoginPage from "./pages/loginPage";
|
||||
import SignUp from "./pages/signUpPage";
|
||||
|
||||
const Router = () => {
|
||||
return (
|
||||
<Routes>
|
||||
<Route path="/" element={<HomePage />} />
|
||||
<Route path="/community" element={<CommunityPage />} />
|
||||
<Route
|
||||
path="/"
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<HomePage />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/community"
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<CommunityPage />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route path="/flow/:id/">
|
||||
<Route path="" element={<FlowPage />} />
|
||||
<Route path="view" element={<ViewPage />} />
|
||||
<Route
|
||||
path=""
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<FlowPage />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="view"
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<ViewPage />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
</Route>
|
||||
<Route path="*" element={<HomePage />} />
|
||||
<Route
|
||||
path="*"
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<CatchAllRoute />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
/>
|
||||
|
||||
<Route path="/login" element={<LoginPage />} />
|
||||
{/* <Route path="/signup" element={<SignUp />} /> */}
|
||||
<Route path="/login/admin" element={<LoginAdminPage />} />
|
||||
<Route
|
||||
path="/login"
|
||||
element={
|
||||
<ProtectedLoginRoute>
|
||||
<LoginPage />
|
||||
</ProtectedLoginRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/signup"
|
||||
element={
|
||||
<ProtectedLoginRoute>
|
||||
<SignUp />
|
||||
</ProtectedLoginRoute>
|
||||
}
|
||||
/>
|
||||
<Route
|
||||
path="/login/admin"
|
||||
element={
|
||||
<ProtectedLoginRoute>
|
||||
<LoginAdminPage />
|
||||
</ProtectedLoginRoute>
|
||||
}
|
||||
/>
|
||||
|
||||
<Route path="/admin" element={<AdminPage />} />
|
||||
<Route
|
||||
path="/admin"
|
||||
element={
|
||||
<ProtectedAdminRoute>
|
||||
<AdminPage />
|
||||
</ProtectedAdminRoute>
|
||||
}
|
||||
/>
|
||||
|
||||
<Route path="/account">
|
||||
<Route path="delete" element={<DeleteAccountPage />}></Route>
|
||||
<Route
|
||||
path="delete"
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<DeleteAccountPage />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
></Route>
|
||||
<Route
|
||||
path="api-keys"
|
||||
element={
|
||||
<ProtectedRoute>
|
||||
<ApiKeysPage />
|
||||
</ProtectedRoute>
|
||||
}
|
||||
></Route>
|
||||
</Route>
|
||||
</Routes>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -62,3 +62,27 @@ export type UploadFileTypeAPI = {
|
|||
file_path: string;
|
||||
flowId: string;
|
||||
};
|
||||
|
||||
export type LoginType = {
|
||||
grant_type?: string;
|
||||
username: string;
|
||||
password: string;
|
||||
scrope?: string;
|
||||
client_id?: string;
|
||||
client_secret?: string;
|
||||
};
|
||||
|
||||
export type LoginAuthType = {
|
||||
access_token: string;
|
||||
refresh_token: string;
|
||||
token_type?: string;
|
||||
};
|
||||
|
||||
export type Users = {
|
||||
id: string;
|
||||
username: string;
|
||||
is_active: boolean;
|
||||
is_superuser: boolean;
|
||||
create_at: Date;
|
||||
updated_at: Date;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -218,7 +218,7 @@ export type signUpInputStateType = {
|
|||
|
||||
export type inputHandlerEventType = {
|
||||
target: {
|
||||
value: string;
|
||||
value: string | boolean;
|
||||
name: string;
|
||||
};
|
||||
};
|
||||
|
|
@ -261,6 +261,29 @@ export type loginInputStateType = {
|
|||
password: string;
|
||||
};
|
||||
|
||||
export type UserInputType = {
|
||||
username: string;
|
||||
password: string;
|
||||
is_active?: boolean;
|
||||
is_superuser?: boolean;
|
||||
id?: string;
|
||||
create_at?: string;
|
||||
updated_at?:string;
|
||||
};
|
||||
|
||||
export type ApiKeyType = {
|
||||
title: string;
|
||||
cancelText: string;
|
||||
confirmationText: string;
|
||||
children: ReactElement;
|
||||
icon: string;
|
||||
data?: any;
|
||||
onCloseModal: () => void;
|
||||
};
|
||||
|
||||
export type ApiKeyInputType = {
|
||||
apikeyname: string;
|
||||
};
|
||||
export type groupedObjType = {
|
||||
family: string;
|
||||
type: string;
|
||||
|
|
@ -508,6 +531,15 @@ export type validationStatusType = {
|
|||
progress: number;
|
||||
valid: boolean;
|
||||
};
|
||||
|
||||
export type ApiKey = {
|
||||
id: string;
|
||||
api_key: string;
|
||||
name: string;
|
||||
created_at: string;
|
||||
last_used_at: string;
|
||||
total_uses: number;
|
||||
};
|
||||
export type fetchErrorComponentType = {
|
||||
message: string;
|
||||
description: string;
|
||||
|
|
|
|||
|
|
@ -1,16 +1,17 @@
|
|||
import { Users } from "../api";
|
||||
|
||||
export type AuthContextType = {
|
||||
isAdmin: boolean;
|
||||
setIsAdmin: (isAdmin: boolean) => void;
|
||||
isAuthenticated: boolean;
|
||||
accessToken: string | null;
|
||||
refreshToken: string | null;
|
||||
login: (accessToken: string, refreshToken: string) => void;
|
||||
logout: () => void;
|
||||
refreshAccessToken: (refreshToken: string) => Promise<void>;
|
||||
userData: userData | null;
|
||||
setUserData: (userData: userData | null) => void;
|
||||
};
|
||||
|
||||
export type userData = {
|
||||
id: string;
|
||||
name: string;
|
||||
email: string;
|
||||
role: string;
|
||||
userData: Users | null;
|
||||
setUserData: (userData: Users | null) => void;
|
||||
getAuthentication: () => boolean;
|
||||
authenticationErrorCount: number;
|
||||
autoLogin: boolean;
|
||||
setAutoLogin: (autoLogin: boolean) => void;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -48,6 +48,8 @@ export type alertContextType = {
|
|||
export type darkContextType = {
|
||||
dark: {};
|
||||
setDark: (newState: {}) => void;
|
||||
stars: number;
|
||||
setStars: (stars: number) => void;
|
||||
};
|
||||
|
||||
export type locationContextType = {
|
||||
|
|
|
|||
|
|
@ -19,6 +19,8 @@ import {
|
|||
Edit,
|
||||
Eraser,
|
||||
ExternalLink,
|
||||
Eye,
|
||||
EyeOff,
|
||||
File,
|
||||
FileDown,
|
||||
FileSearch,
|
||||
|
|
@ -33,6 +35,7 @@ import {
|
|||
HelpCircle,
|
||||
Home,
|
||||
Info,
|
||||
Key,
|
||||
Laptop2,
|
||||
Layers,
|
||||
Lightbulb,
|
||||
|
|
@ -61,6 +64,7 @@ import {
|
|||
Undo,
|
||||
Unplug,
|
||||
Upload,
|
||||
UserCog2,
|
||||
UserMinus2,
|
||||
UserPlus2,
|
||||
Users2,
|
||||
|
|
@ -291,5 +295,9 @@ export const nodeIconsLucide: iconsType = {
|
|||
ChevronsLeft,
|
||||
FaGithub,
|
||||
FaApple,
|
||||
EyeOff,
|
||||
Eye,
|
||||
UserCog2,
|
||||
Key,
|
||||
Unplug,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -201,6 +201,12 @@ module.exports = {
|
|||
".dark .theme-attribution .react-flow__attribution a": {
|
||||
color: "black",
|
||||
},
|
||||
".text-align-last-left": {
|
||||
"text-align-last": "left",
|
||||
},
|
||||
".text-align-last-right": {
|
||||
"text-align-last": "right",
|
||||
},
|
||||
});
|
||||
}),
|
||||
require("@tailwindcss/typography"),
|
||||
|
|
|
|||
|
|
@ -5,6 +5,9 @@ from typing import AsyncGenerator, TYPE_CHECKING
|
|||
from langflow.api.v1.flows import get_session
|
||||
|
||||
from langflow.graph.graph.base import Graph
|
||||
from langflow.services.auth.utils import get_password_hash
|
||||
from langflow.services.database.models.flow.flow import Flow
|
||||
from langflow.services.database.models.user.user import User, UserCreate
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from httpx import AsyncClient
|
||||
|
|
@ -43,7 +46,7 @@ async def async_client() -> AsyncGenerator:
|
|||
|
||||
|
||||
# Create client fixture for FastAPI
|
||||
@pytest.fixture(scope="module")
|
||||
@pytest.fixture(scope="module", autouse=True)
|
||||
def client():
|
||||
from langflow.main import create_app
|
||||
|
||||
|
|
@ -155,3 +158,53 @@ def session_getter_fixture(client):
|
|||
@pytest.fixture
|
||||
def runner():
|
||||
return CliRunner()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_user(client):
|
||||
user_data = UserCreate(
|
||||
username="testuser",
|
||||
password="testpassword",
|
||||
)
|
||||
response = client.post("/api/v1/user", json=user_data.dict())
|
||||
return response.json()
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def active_user(client, session):
|
||||
user = User(
|
||||
username="activeuser",
|
||||
password=get_password_hash(
|
||||
"testpassword"
|
||||
), # Assuming password needs to be hashed
|
||||
is_active=True,
|
||||
is_superuser=False,
|
||||
)
|
||||
session.add(user)
|
||||
session.commit()
|
||||
return user
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def logged_in_headers(client, active_user):
|
||||
login_data = {"username": active_user.username, "password": "testpassword"}
|
||||
response = client.post("/api/v1/login", data=login_data)
|
||||
assert response.status_code == 200
|
||||
tokens = response.json()
|
||||
a_token = tokens["access_token"]
|
||||
return {"Authorization": f"Bearer {a_token}"}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def flow(client, json_flow: str, session, active_user):
|
||||
from langflow.services.database.models.flow.flow import FlowCreate
|
||||
|
||||
loaded_json = json.loads(json_flow)
|
||||
flow_data = FlowCreate(
|
||||
name="test_flow", data=loaded_json.get("data"), user_id=active_user.id
|
||||
)
|
||||
flow = Flow(**flow_data.dict())
|
||||
session.add(flow)
|
||||
session.commit()
|
||||
|
||||
return flow
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
def test_zero_shot_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_zero_shot_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
@ -113,8 +113,8 @@ def test_zero_shot_agent(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_json_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_json_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
@ -152,8 +152,8 @@ def test_json_agent(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_csv_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_csv_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
@ -195,8 +195,8 @@ def test_csv_agent(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_initialize_agent(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_initialize_agent(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
agents = json_response["agents"]
|
||||
|
|
|
|||
50
tests/test_api_key.py
Normal file
50
tests/test_api_key.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
import pytest
|
||||
from langflow.services.database.models.api_key import ApiKeyCreate
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_key(client, logged_in_headers, active_user):
|
||||
api_key = ApiKeyCreate(name="test-api-key")
|
||||
|
||||
response = client.post(
|
||||
"api/v1/api_key", data=api_key.json(), headers=logged_in_headers
|
||||
)
|
||||
assert response.status_code == 200, response.text
|
||||
return response.json()
|
||||
|
||||
|
||||
def test_get_api_keys(client, logged_in_headers, api_key):
|
||||
response = client.get("api/v1/api_key", headers=logged_in_headers)
|
||||
assert response.status_code == 200, response.text
|
||||
data = response.json()
|
||||
assert "total_count" in data
|
||||
assert "user_id" in data
|
||||
assert "api_keys" in data
|
||||
assert any("test-api-key" in api_key["name"] for api_key in data["api_keys"])
|
||||
# assert all api keys in data["api_keys"] are masked
|
||||
assert all("**" in api_key["api_key"] for api_key in data["api_keys"])
|
||||
# Add more assertions as needed based on the expected data structure and content
|
||||
|
||||
|
||||
def test_create_api_key(client, logged_in_headers):
|
||||
api_key_name = "test-api-key"
|
||||
response = client.post(
|
||||
"api/v1/api_key", json={"name": api_key_name}, headers=logged_in_headers
|
||||
)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "name" in data and data["name"] == api_key_name
|
||||
assert "api_key" in data
|
||||
# When creating the API key is returned which is
|
||||
# the only time the API key is unmasked
|
||||
assert "**" not in data["api_key"]
|
||||
|
||||
|
||||
def test_delete_api_key(client, logged_in_headers, active_user, api_key):
|
||||
# Assuming a function to create a test API key, returning the key ID
|
||||
api_key_id = api_key["id"]
|
||||
response = client.delete(f"api/v1/api_key/{api_key_id}", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["detail"] == "API Key deleted"
|
||||
# Optionally, add a follow-up check to ensure that the key is actually removed from the database
|
||||
|
|
@ -1,8 +1,8 @@
|
|||
from fastapi.testclient import TestClient
|
||||
|
||||
|
||||
# def test_chains_settings(client: TestClient):
|
||||
# response = client.get("api/v1/all")
|
||||
# def test_chains_settings(client: TestClient, logged_in_headers):
|
||||
# response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
# assert response.status_code == 200
|
||||
# json_response = response.json()
|
||||
# chains = json_response["chains"]
|
||||
|
|
@ -10,8 +10,8 @@ from fastapi.testclient import TestClient
|
|||
|
||||
|
||||
# Test the ConversationChain object
|
||||
def test_conversation_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_conversation_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@ -102,8 +102,8 @@ def test_conversation_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_llm_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_llm_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@ -173,8 +173,8 @@ def test_llm_chain(client: TestClient):
|
|||
}
|
||||
|
||||
|
||||
def test_llm_checker_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_llm_checker_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@ -207,8 +207,8 @@ def test_llm_checker_chain(client: TestClient):
|
|||
assert chain["description"] == ""
|
||||
|
||||
|
||||
def test_llm_math_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_llm_math_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@ -299,8 +299,8 @@ def test_llm_math_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_series_character_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_series_character_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@ -367,8 +367,8 @@ def test_series_character_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_mid_journey_prompt_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_mid_journey_prompt_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
@ -408,8 +408,8 @@ def test_mid_journey_prompt_chain(client: TestClient):
|
|||
)
|
||||
|
||||
|
||||
def test_time_travel_guide_chain(client: TestClient):
|
||||
response = client.get("api/v1/all")
|
||||
def test_time_travel_guide_chain(client: TestClient, logged_in_headers):
|
||||
response = client.get("api/v1/all", headers=logged_in_headers)
|
||||
assert response.status_code == 200
|
||||
json_response = response.json()
|
||||
chains = json_response["chains"]
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue