Merge remote-tracking branch 'origin/dev' into celery

This commit is contained in:
Gabriel Luiz Freitas Almeida 2023-09-05 08:19:40 -03:00
commit 2e09d53ced
57 changed files with 230 additions and 211 deletions

View file

@@ -27,7 +27,7 @@ format:
cd src/frontend && npm run format
lint:
poetry run mypy --exclude .venv .
poetry run mypy src/backend/langflow
poetry run black . --check
poetry run ruff . --fix

174
poetry.lock generated
View file

@@ -269,17 +269,17 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "asttokens"
version = "2.2.1"
version = "2.4.0"
description = "Annotate AST trees with source code positions"
optional = false
python-versions = "*"
files = [
{file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"},
{file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"},
{file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"},
{file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"},
]
[package.dependencies]
six = "*"
six = ">=1.12.0"
[package.extras]
test = ["astroid", "pytest"]
@@ -591,13 +591,13 @@ files = [
[[package]]
name = "celery"
version = "5.3.3"
version = "5.3.4"
description = "Distributed Task Queue."
optional = true
python-versions = ">=3.8"
files = [
{file = "celery-5.3.3-py3-none-any.whl", hash = "sha256:d65c0be70d0949fcda8893876a071a7cfd9f248f9ad92e1919845e5cbc268db7"},
{file = "celery-5.3.3.tar.gz", hash = "sha256:bac90ef99b70b9b5b5d4cfcebf6f1ab5168b86c6120bc7c5814cd8234dfd9381"},
{file = "celery-5.3.4-py3-none-any.whl", hash = "sha256:1e6ed40af72695464ce98ca2c201ad0ef8fd192246f6c9eac8bba343b980ad34"},
{file = "celery-5.3.4.tar.gz", hash = "sha256:9023df6a8962da79eb30c0c84d5f4863d9793a466354cc931d7f72423996de28"},
]
[package.dependencies]
@@ -1023,13 +1023,13 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"]
[[package]]
name = "cohere"
version = "4.21"
version = "4.22"
description = ""
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "cohere-4.21-py3-none-any.whl", hash = "sha256:5eb81db62e78b3156e734421cc3e657054f9d9f1d68b9f38cf48fe3a8ae40dbc"},
{file = "cohere-4.21.tar.gz", hash = "sha256:f611438f409dfc5d5a0a153a585349f5a80b169c7102b5994d9999ecf8440866"},
{file = "cohere-4.22-py3-none-any.whl", hash = "sha256:e09cc3967c34a15cde63e29783c05bfe80617b2a60817ad1d13c12845f6e71b9"},
{file = "cohere-4.22.tar.gz", hash = "sha256:aa235ea8d598bde03c2a5c6671f2785b6707a6f297c765ea87422f62f19a9021"},
]
[package.dependencies]
@@ -1874,13 +1874,13 @@ files = [
[[package]]
name = "fsspec"
version = "2023.6.0"
version = "2023.9.0"
description = "File-system specification"
optional = false
python-versions = ">=3.8"
files = [
{file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"},
{file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"},
{file = "fsspec-2023.9.0-py3-none-any.whl", hash = "sha256:d55b9ab2a4c1f2b759888ae9f93e40c2aa72c0808132e87e282b549f9e6c4254"},
{file = "fsspec-2023.9.0.tar.gz", hash = "sha256:4dbf0fefee035b7c6d3bbbe6bc99b2f201f40d4dca95b67c2b719be77bcd917f"},
]
[package.extras]
@@ -1909,42 +1909,46 @@ tqdm = ["tqdm"]
[[package]]
name = "gevent"
version = "23.7.0"
version = "23.9.0.post1"
description = "Coroutine-based network library"
optional = false
python-versions = ">=3.8"
files = [
{file = "gevent-23.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:add904a7ef960cd4e133e61eb7413982c5e4203928160be1c09752ac06a25e71"},
{file = "gevent-23.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bd9ea1b5fbdc7e5921a9e515f34a450eb3927a902253a33caedcce2d19d7d96"},
{file = "gevent-23.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c7c349aa23d67cf5cc3b2c87aaedcfead976d0577b1cfcd07ffeba63baba79c"},
{file = "gevent-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92b837b60e850c50fc6d723d1e363e786d37fd9d51e564e07df52ad5e8a86d4"},
{file = "gevent-23.7.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a51a8e3cdaa6901e47d56f84cb5f92b1bf3deea920bce69cf7a245df16159ac"},
{file = "gevent-23.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1dba07207b15b371e50372369edf256a142cb5cdf8599849cbf8660327efa06"},
{file = "gevent-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:34086bcc1252ae41e1cb81cf13c4a7678031595c12f4e9a1c3d0ab433f20826a"},
{file = "gevent-23.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5da07d65dfa23fe419c37cea110bf951b42af6bf3a1fff244043a75c9185dbd5"},
{file = "gevent-23.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4d7be3352126458cc818309ca6a3b678c209b1ae33e56b6975c6a8309f2068"},
{file = "gevent-23.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76ca6f893953ab898ebbff5d772103318a85044e55d0bad401d6b49d71bb76e7"},
{file = "gevent-23.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aeb1511cf0786152af741c47ee462dac81b57bbd1fbbe08ab562b6c8c9ad75ed"},
{file = "gevent-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919423e803939726c99ab2d29ea46b8676af549cee72d263f2b24758ec607b2c"},
{file = "gevent-23.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cea93f4f77badbddc711620cca164ad75c74056603908e621a5ba1b97adbc39c"},
{file = "gevent-23.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dec7b08daf08385fb281b81ec2e7e703243975d867f40ae0a8a3e30b380eb9ea"},
{file = "gevent-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f522b6b015f1bfa9d8d3716ddffb23e3d4a8933df3e4ebf0a29a65a9fa74382b"},
{file = "gevent-23.7.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:746a1e12f280dab07389e6709164b1e1a6caaf50493ea5b1dcaa73cff005174c"},
{file = "gevent-23.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b230007a665d2cf5cf8878c9f56a2b8bacbdc4fe0235afc5269b71cd00528e5"},
{file = "gevent-23.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1d2f1e67d04fde47ca2deac89733df28ef3a7ec1d7359a79f57d4778cced16d"},
{file = "gevent-23.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:debc177e88a8c876cb1a4d974f985d03670177bdc61e1c084a8d525f1a50b12d"},
{file = "gevent-23.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b3dd449c80814357f6568eb095a2be2421b805d59fa97c65094707e04a181f9"},
{file = "gevent-23.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:769e8811ded08fe7d8b09ad8ebb72d47aecc112411e0726e7296b7ed187ed629"},
{file = "gevent-23.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11b9bb0bce45170ff992760385a86e6955ccb88dba4a82a64d5ce9459290d8d6"},
{file = "gevent-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e0d76a7848726e0646324a1adc011355dcd91875e7913badd1ada2e5eeb8a6e"},
{file = "gevent-23.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a226b42cb9a49580ca7729572a4f8289d1fa28cd2529c9f4eed3e14b995d1c9c"},
{file = "gevent-23.7.0-cp38-cp38-win32.whl", hash = "sha256:1234849b0bc4df560924aa92f7c01ca3f310677735fb508a2b0d7a61bb946916"},
{file = "gevent-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:a8f62e8d37913512823923e05607a296389aeb50ccca8a271ae7cedb5b17faeb"},
{file = "gevent-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369241d1a6a3fe3ef4eba454b71e0168026560c5344fc4bc37196867041982ac"},
{file = "gevent-23.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:94b013587f7c4697d620c129627f7b12d7d9f6e40ab198635891ca2098cd8556"},
{file = "gevent-23.7.0-cp39-cp39-win32.whl", hash = "sha256:83b6d61a8e9da25edb304ca7fba19ee57bb1ffa801f9df3e668bfed7bb8386cb"},
{file = "gevent-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:8c284390f0f6d0b5be3bf805fa8e0ae1329065f2b0ac5af5423c67183197deb8"},
{file = "gevent-23.7.0.tar.gz", hash = "sha256:d0d3630674c1b344b256a298ab1ff43220f840b12af768131b5d74e485924237"},
{file = "gevent-23.9.0.post1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c4b2efc68fb3aef5dde8204d0f71c3585ba621c57e9b937b46ff5678f1cd7404"},
{file = "gevent-23.9.0.post1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b3a813ff1151d75538bb5ec821332627cd2c4685cc72702640d203a426041ca"},
{file = "gevent-23.9.0.post1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cf108ee9c18c0ea5cf81d3fc7859f512dab61c2d76937b2510c7bf8cfaabfe7"},
{file = "gevent-23.9.0.post1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ff1771bc8f2ed343f32c2f40dbd25f04fdfe2d83eb02e0401945dc61115dbe"},
{file = "gevent-23.9.0.post1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26e308815fb2d4d84e7a55eebd00c4014e5cb07ead8f3f66236e5a797937340c"},
{file = "gevent-23.9.0.post1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fd8941f5c5cc998114b89e032e1ebabd779d99faa60d004b960587b866195ba"},
{file = "gevent-23.9.0.post1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:deb353bf15ab724fe8bf587433519d558ddfd89fa35b77f7886de4312517eee4"},
{file = "gevent-23.9.0.post1-cp310-cp310-win_amd64.whl", hash = "sha256:9a4c1afd3fa2103f11c27f19b060c2ed122ed487cbdf79e7987ef261aa04429f"},
{file = "gevent-23.9.0.post1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:29ccc476077a317d082ddad4dabf5c68ccf7079aaf14aa5be8e0529b06f569a6"},
{file = "gevent-23.9.0.post1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cb909b0649b0e15c069527a61af83f067e4c59ff03a07aa40aa2d5e8e355d20"},
{file = "gevent-23.9.0.post1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f463a131df0e8d466a8caf7909ad73c80f793ed97c6376e78c7c75a51f19cba0"},
{file = "gevent-23.9.0.post1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:edb9ceb5f88154e83ee8fc2e4b2d8ca070c62f1266d73f88578109b9c4564003"},
{file = "gevent-23.9.0.post1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ee6382fde487a84a4a21711988d9eb97ed63c69be085b442e1665dc44022be60"},
{file = "gevent-23.9.0.post1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9d21796a54dcccabe9fc0053c1bd991dfa63e554873e5a5f9c0885984068b2a"},
{file = "gevent-23.9.0.post1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d33f997d97f267e9f62db9cd03d42f711df2ddba944173853773b220187ca7a0"},
{file = "gevent-23.9.0.post1-cp311-cp311-win_amd64.whl", hash = "sha256:4bdca1bd1fb0c3524dbe0a273c87eb9a0428ea7f2533d579a3194426fbb93c92"},
{file = "gevent-23.9.0.post1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:bccd4e3d21e7c5f7b72e3382523702ce58add691417633dfafa305978bebee84"},
{file = "gevent-23.9.0.post1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c24bd27f8a75fe70475e72dde519d569d58f0f5e8f4f6d009493ee660855c3d1"},
{file = "gevent-23.9.0.post1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc5b637870c325899eb9fc44915670deb2ef413c5c90ad0d96c335e41de1f751"},
{file = "gevent-23.9.0.post1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bcff1fc4bc0e5610aa541ad14fead244e8b789fda98acbacd268668089c7373"},
{file = "gevent-23.9.0.post1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c3d665d252903982469b0933f31dd346a249d2e2c45dd0e1c9263889a5dbfbc6"},
{file = "gevent-23.9.0.post1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f23a560f1731a2b4f582b89e8d8afcbfd66695b025712e295f21aeec3d786413"},
{file = "gevent-23.9.0.post1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1b2804d7e2909074b0cf6e2371595935a699edc8bd403211a414752e68f7e0ad"},
{file = "gevent-23.9.0.post1-cp312-cp312-win_amd64.whl", hash = "sha256:f7aa27b8585b66fb5fff3a54e3e7bb837258bda39bb65a788304c8d45b9bb9d4"},
{file = "gevent-23.9.0.post1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:bc836d91b834fa4ce18ee062861dc6e488f35254def8301ffcac6900331941a7"},
{file = "gevent-23.9.0.post1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a21b9c7356e9ab0baaa8afa85fb18406cbff54d3cf8033e1e97e7186a3deb391"},
{file = "gevent-23.9.0.post1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3c4acda344e9864b2d0755fad1c736dc4effae95b0fd8915a261ff6ace09416f"},
{file = "gevent-23.9.0.post1-cp38-cp38-win32.whl", hash = "sha256:22d7fdbfc7127c5d59511c3de9f8394a125f32bccc1254915944d95522876a8e"},
{file = "gevent-23.9.0.post1-cp38-cp38-win_amd64.whl", hash = "sha256:3e6b6c53e1e81b3f22180da316769ac55a41085655971e0e086899f0ddb017b0"},
{file = "gevent-23.9.0.post1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:f0dbee943865313331ece9f9675a30848d027df653b0ff4881d2be14d0c2ea1c"},
{file = "gevent-23.9.0.post1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:98de0f1eecd772df87018e04ef8e274b72c3b3127d2e15f76b8b761ed135b803"},
{file = "gevent-23.9.0.post1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17ebb6f981389c17321b95bc59ff6a65edeb98f3205884babaec9cb514aaa0d3"},
{file = "gevent-23.9.0.post1-cp39-cp39-win32.whl", hash = "sha256:f731574d908cbe505e103f4c5b4d64fe4e0a82cef371e925212689194ee22198"},
{file = "gevent-23.9.0.post1-cp39-cp39-win_amd64.whl", hash = "sha256:595706422f1832f2dd29bb9cb3219780f1e158d5a771199fe26b00da1bae8214"},
{file = "gevent-23.9.0.post1.tar.gz", hash = "sha256:943f26edada39dfd5f50551157bb9011191c7367be36e341d0f1cdecfe07a229"},
]
[package.dependencies]
@@ -2099,13 +2103,13 @@ smmap = ">=3.0.1,<6"
[[package]]
name = "gitpython"
version = "3.1.33"
version = "3.1.34"
description = "GitPython is a Python library used to interact with Git repositories"
optional = false
python-versions = ">=3.7"
files = [
{file = "GitPython-3.1.33-py3-none-any.whl", hash = "sha256:11f22466f982211ad8f3bdb456c03be8466c71d4da8774f3a9f68344e89559cb"},
{file = "GitPython-3.1.33.tar.gz", hash = "sha256:13aaa3dff88a23afec2d00eb3da3f2e040e2282e41de484c5791669b31146084"},
{file = "GitPython-3.1.34-py3-none-any.whl", hash = "sha256:5d3802b98a3bae1c2b8ae0e1ff2e4aa16bcdf02c145da34d092324f599f01395"},
{file = "GitPython-3.1.34.tar.gz", hash = "sha256:85f7d365d1f6bf677ae51039c1ef67ca59091c7ebd5a3509aa399d4eda02d6dd"},
]
[package.dependencies]
@@ -3040,13 +3044,13 @@ files = [
[[package]]
name = "ipykernel"
version = "6.25.1"
version = "6.25.2"
description = "IPython Kernel for Jupyter"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipykernel-6.25.1-py3-none-any.whl", hash = "sha256:c8a2430b357073b37c76c21c52184db42f6b4b0e438e1eb7df3c4440d120497c"},
{file = "ipykernel-6.25.1.tar.gz", hash = "sha256:050391364c0977e768e354bdb60cbbfbee7cbb943b1af1618382021136ffd42f"},
{file = "ipykernel-6.25.2-py3-none-any.whl", hash = "sha256:2e2ee359baba19f10251b99415bb39de1e97d04e1fab385646f24f0596510b77"},
{file = "ipykernel-6.25.2.tar.gz", hash = "sha256:f468ddd1f17acb48c8ce67fcfa49ba6d46d4f9ac0438c1f441be7c3d1372230b"},
]
[package.dependencies]
@@ -3544,13 +3548,13 @@ test = ["psutil", "pytest", "pytest-asyncio"]
[[package]]
name = "langsmith"
version = "0.0.32"
version = "0.0.33"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langsmith-0.0.32-py3-none-any.whl", hash = "sha256:de40a65c81b9aa26dcabe3d2aee84a43a6e1703a5381d624b6d58166b4b3e00c"},
{file = "langsmith-0.0.32.tar.gz", hash = "sha256:6ec660b991057f7f4217ddfb9e10d75ae3159d1410af55818b848e2ede3e019c"},
{file = "langsmith-0.0.33-py3-none-any.whl", hash = "sha256:cdff11a6272d3cba72c151960c0319b1d36e0770d37f05061d6c31ef1a2404a4"},
{file = "langsmith-0.0.33.tar.gz", hash = "sha256:c9c640ac238d4cabc8f9744e04346d3dfaf0ca6c9dc37bd2a25b8031eda35dc3"},
]
[package.dependencies]
@@ -3624,13 +3628,13 @@ Werkzeug = ">=2.0.0"
[[package]]
name = "loguru"
version = "0.7.0"
version = "0.7.1"
description = "Python logging made (stupidly) simple"
optional = false
python-versions = ">=3.5"
files = [
{file = "loguru-0.7.0-py3-none-any.whl", hash = "sha256:b93aa30099fa6860d4727f1b81f8718e965bb96253fa190fab2077aaad6d15d3"},
{file = "loguru-0.7.0.tar.gz", hash = "sha256:1612053ced6ae84d7959dd7d5e431a0532642237ec21f7fd83ac73fe539e03e1"},
{file = "loguru-0.7.1-py3-none-any.whl", hash = "sha256:046bf970cb3cad77a28d607cbf042ac25a407db987a1e801c7f7e692469982f9"},
{file = "loguru-0.7.1.tar.gz", hash = "sha256:7ba2a7d81b79a412b0ded69bd921e012335e80fd39937a633570f273a343579e"},
]
[package.dependencies]
@@ -3638,7 +3642,7 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
[package.extras]
dev = ["Sphinx (==5.3.0)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v0.990)", "pre-commit (==3.2.1)", "pytest (==6.1.2)", "pytest (==7.2.1)", "pytest-cov (==2.12.1)", "pytest-cov (==4.0.0)", "pytest-mypy-plugins (==1.10.1)", "pytest-mypy-plugins (==1.9.3)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.2.0)", "tox (==3.27.1)", "tox (==4.4.6)"]
dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "pre-commit (==3.3.1)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
[[package]]
name = "lxml"
@@ -5826,13 +5830,13 @@ diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pypdf"
version = "3.15.4"
version = "3.15.5"
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
optional = false
python-versions = ">=3.6"
files = [
{file = "pypdf-3.15.4-py3-none-any.whl", hash = "sha256:791f0a52ddf390709f1f1b0c05c4d8cde13829b4f7cb91b4003b9bdd352bc944"},
{file = "pypdf-3.15.4.tar.gz", hash = "sha256:a2780ed01dc4da23ac1542209f58fd3d951d8dd37c3c0309d123cd2f2679fb03"},
{file = "pypdf-3.15.5-py3-none-any.whl", hash = "sha256:8e003c4ee4875450612c2571ba9a5cc12d63a46b226a484314b21b7f013d2717"},
{file = "pypdf-3.15.5.tar.gz", hash = "sha256:81cf6e8a206450726555023a36c13fb40f680c047b8fcc0bcbfd4d1908c33d31"},
]
[package.dependencies]
@@ -5871,13 +5875,13 @@ chardet = "*"
[[package]]
name = "pytest"
version = "7.4.0"
version = "7.4.1"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"},
{file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"},
{file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"},
{file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"},
]
[package.dependencies]
@@ -6100,13 +6104,13 @@ test = ["coverage (>=5,<6)", "mock (==1.3.0)", "pytest (>=7,<8)", "pytest-mock (
[[package]]
name = "pytz"
version = "2023.3"
version = "2023.3.post1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
{file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"},
{file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"},
{file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
{file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
]
[[package]]
@@ -6975,13 +6979,13 @@ files = [
[[package]]
name = "soupsieve"
version = "2.4.1"
version = "2.5"
description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"},
{file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"},
{file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
]
[[package]]
@@ -7527,13 +7531,13 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"]
[[package]]
name = "transformers"
version = "4.32.1"
version = "4.33.0"
description = "State-of-the-art Machine Learning for JAX, PyTorch and TensorFlow"
optional = true
python-versions = ">=3.8.0"
files = [
{file = "transformers-4.32.1-py3-none-any.whl", hash = "sha256:b930d3dbd907a3f300cf49e54d63a56f8a0ab16b01a2c2a61ecff37c6de1da08"},
{file = "transformers-4.32.1.tar.gz", hash = "sha256:1edc8ae1de357d97c3d36b04412aa63d55e6fc0c4b39b419a7d380ed947d2252"},
{file = "transformers-4.33.0-py3-none-any.whl", hash = "sha256:c3b7f818e90c4361bb50ad541ab94e28329aa0d97a1c45ffafa5a8b693bc73ec"},
{file = "transformers-4.33.0.tar.gz", hash = "sha256:9e894dc62cfb02d92c1201e57219765b53be6486a306db8b60414e93fc2e39a5"},
]
[package.dependencies]
@@ -7550,16 +7554,16 @@ tqdm = ">=4.27"
[package.extras]
accelerate = ["accelerate (>=0.20.3)"]
agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.9,!=1.12.0)"]
all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"]
agents = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "datasets (!=2.5.0)", "diffusers", "opencv-python", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.10,!=1.12.0)"]
all = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"]
audio = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
codecarbon = ["codecarbon (==1.2.0)"]
deepspeed = ["accelerate (>=0.20.3)", "deepspeed (>=0.9.3)"]
deepspeed-testing = ["GitPython (<3.1.19)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "deepspeed (>=0.9.3)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "sentencepiece (>=0.1.91,!=0.1.92)", "timeout-decorator"]
dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"]
dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "torchaudio", "torchvision"]
dev = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "decord (==0.6.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flax (>=0.4.1,<=0.7.0)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
dev-tensorflow = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "urllib3 (<2.0.0)"]
dev-torch = ["GitPython (<3.1.19)", "Pillow (<10.0.0)", "accelerate (>=0.20.3)", "beautifulsoup4", "black (>=23.1,<24.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf", "psutil", "pyctcdecode (>=0.4.0)", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "ray[tune]", "rhoknp (>=1.1.0,<1.3.1)", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "ruff (>=0.0.241,<=0.0.259)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)", "urllib3 (<2.0.0)"]
docs = ["Pillow (<10.0.0)", "accelerate (>=0.20.3)", "av (==9.2.0)", "codecarbon (==1.2.0)", "decord (==0.6.0)", "flax (>=0.4.1,<=0.7.0)", "hf-doc-builder", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8,<=0.1.4)", "optuna", "phonemizer", "protobuf", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "torchaudio", "torchvision"]
docs-specific = ["hf-doc-builder"]
fairscale = ["fairscale (>0.3)"]
flax = ["flax (>=0.4.1,<=0.7.0)", "jax (>=0.4.1,<=0.4.13)", "jaxlib (>=0.4.1,<=0.4.13)", "optax (>=0.0.8,<=0.1.4)"]
@ -7582,15 +7586,15 @@ sigopt = ["sigopt"]
sklearn = ["scikit-learn"]
speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
testing = ["GitPython (<3.1.19)", "beautifulsoup4", "black (>=23.1,<24.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf", "psutil", "pytest (>=7.2.0)", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"]
tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"]
tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.14)", "tensorflow-text (<2.14)", "tf2onnx"]
tf = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"]
tf-cpu = ["keras-nlp (>=0.3.1)", "onnxconverter-common", "tensorflow-cpu (>=2.6,<2.15)", "tensorflow-text (<2.15)", "tf2onnx"]
tf-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"]
timm = ["timm"]
tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.14)"]
torch = ["accelerate (>=0.20.3)", "torch (>=1.9,!=1.12.0)"]
torch = ["accelerate (>=0.20.3)", "torch (>=1.10,!=1.12.0)"]
torch-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)", "torchaudio"]
torch-vision = ["Pillow (<10.0.0)", "torchvision"]
torchhub = ["filelock", "huggingface-hub (>=0.15.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.9,!=1.12.0)", "tqdm (>=4.27)"]
torchhub = ["filelock", "huggingface-hub (>=0.15.1,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.10,!=1.12.0)", "tqdm (>=4.27)"]
video = ["av (==9.2.0)", "decord (==0.6.0)"]
vision = ["Pillow (<10.0.0)"]
@ -8554,4 +8558,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.11"
content-hash = "2fce18d49910e554b4d6cb9ee7f8e317fcda6064ed3cce49ad3785ee6f7a1e6d"
content-hash = "c75a76d10e72558d53d8421ff8dee3f741a382f0cf0ea1c44ff0a0ec7a4e78d0"

View file

@ -87,6 +87,7 @@ python-jose = "^3.3.0"
metaphor-python = "^0.1.11"
markupsafe = "^2.1.3"
pywin32 = { version = "^306", markers = "sys_platform == 'win32'" }
loguru = "^0.7.1"
[tool.poetry.group.dev.dependencies]

View file

@ -30,10 +30,10 @@ def upgrade() -> None:
# and other related indices
if "flowstyle" in existing_tables:
op.drop_table("flowstyle")
if "ix_flowstyle_flow_id" in [
index["name"] for index in inspector.get_indexes("flowstyle")
]:
op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")
if "ix_flowstyle_flow_id" in [
index["name"] for index in inspector.get_indexes("flowstyle")
]:
op.drop_index("ix_flowstyle_flow_id", table_name="flowstyle")
existing_indices_flow = []
existing_fks_flow = []

View file

@ -22,7 +22,7 @@ class ValidatePromptRequest(BaseModel):
name: str
template: str
# optional for tweak call
frontend_node: Optional[FrontendNodeRequest]
frontend_node: Optional[FrontendNodeRequest] = None
# Build ValidationResponse class for {"imports": {"errors": []}, "function": {"errors": []}}
@ -42,7 +42,7 @@ class CodeValidationResponse(BaseModel):
class PromptValidationResponse(BaseModel):
input_variables: list
# object return for tweak call
frontend_node: FrontendNodeRequest | object
frontend_node: Optional[FrontendNodeRequest] = None
INVALID_CHARACTERS = {

View file

@ -10,7 +10,7 @@ from fastapi import WebSocket
from langchain.schema import AgentAction, LLMResult, AgentFinish
from langflow.utils.logger import logger
from loguru import logger
# https://github.com/hwchase17/chat-langchain/blob/master/callback.py

View file

@ -11,16 +11,15 @@ from fastapi.responses import StreamingResponse
from langflow.api.utils import build_input_keys_response
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData
from langflow.services import service_manager, ServiceType
from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import get_current_active_user, get_current_user
from langflow.services.utils import get_session
from langflow.utils.logger import logger
from langflow.services.utils import get_cache_manager, get_session
from loguru import logger
from langflow.services.utils import get_chat_manager
from sqlmodel import Session
from typing import TYPE_CHECKING
from langflow.services.chat.manager import ChatManager
from langflow.services.cache.manager import BaseCacheManager
if TYPE_CHECKING:
from langflow.services.chat.manager import ChatManager
router = APIRouter(tags=["Chat"])
@ -31,6 +30,7 @@ async def chat(
websocket: WebSocket,
token: str = Query(...),
db: Session = Depends(get_session),
chat_manager: "ChatManager" = Depends(get_chat_manager),
):
"""Websocket endpoint for chat."""
try:
@ -45,7 +45,6 @@ async def chat(
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
)
chat_manager: "ChatManager" = service_manager.get(ServiceType.CHAT_MANAGER)
if client_id in chat_manager.in_memory_cache:
await chat_manager.handle_websocket(client_id, websocket)
else:
@ -69,10 +68,13 @@ async def chat(
@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201)
async def init_build(
graph_data: dict, flow_id: str, current_user=Depends(get_current_active_user)
graph_data: dict,
flow_id: str,
current_user=Depends(get_current_active_user),
chat_manager: "ChatManager" = Depends(get_chat_manager),
cache_manager: "BaseCacheManager" = Depends(get_cache_manager),
):
"""Initialize the build by storing graph data and returning a unique session ID."""
cache_manager = service_manager.get(ServiceType.CACHE_MANAGER)
try:
if flow_id is None:
raise ValueError("No ID provided")
@ -85,7 +87,6 @@ async def init_build(
return InitResponse(flowId=flow_id)
# Delete from cache if already exists
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
if flow_id in chat_manager.cache_manager:
chat_manager.cache_manager.delete(flow_id)
logger.debug(f"Deleted flow {flow_id} from cache")
@ -102,9 +103,10 @@ async def init_build(
@router.get("/build/{flow_id}/status", response_model=BuiltResponse)
async def build_status(flow_id: str):
async def build_status(
flow_id: str, cache_manager: "BaseCacheManager" = Depends(get_cache_manager)
):
"""Check the flow_id is in the cache_manager."""
cache_manager = service_manager.get(ServiceType.CACHE_MANAGER)
try:
built = (
flow_id in cache_manager
@ -121,9 +123,12 @@ async def build_status(flow_id: str):
@router.get("/build/stream/{flow_id}", response_class=StreamingResponse)
async def stream_build(flow_id: str):
async def stream_build(
flow_id: str,
chat_manager: "ChatManager" = Depends(get_chat_manager),
cache_manager: "BaseCacheManager" = Depends(get_cache_manager),
):
"""Stream the build process based on stored flow data."""
cache_manager = service_manager.get(ServiceType.CACHE_MANAGER)
async def event_stream(flow_id):
final_response = {"end_of_stream": True}
@ -204,7 +209,6 @@ async def stream_build(flow_id: str):
"handle_keys": [],
}
yield str(StreamData(event="message", data=input_keys_response))
chat_manager = service_manager.get(ServiceType.CHAT_MANAGER)
chat_manager.set_cache(flow_id, langchain_object)
# We need to reset the chat history
chat_manager.chat_history.empty_history(flow_id)

View file

@ -8,7 +8,7 @@ from langflow.services.database.models.flow import Flow
from langflow.processing.process import process_graph_cached, process_tweaks
from langflow.services.database.models.user.user import User
from langflow.services.utils import get_settings_manager, get_task_manager
from langflow.utils.logger import logger
from loguru import logger
from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body, status
import sqlalchemy as sa
from langflow.interface.custom.custom_component import CustomComponent
@ -39,14 +39,15 @@ from langflow.services.task.manager import TaskManager
router = APIRouter(tags=["Base"])
@router.get("/all")
def get_all(current_user: User = Depends(get_current_active_user)):
@router.get("/all", dependencies=[Depends(get_current_active_user)])
def get_all(
settings_manager=Depends(get_settings_manager),
):
logger.debug("Building langchain types dict")
native_components = build_langchain_types_dict()
# custom_components is a list of dicts
# need to merge all the keys into one dict
custom_components_from_file: dict[str, Any] = {}
settings_manager = get_settings_manager()
if settings_manager.settings.COMPONENTS_PATH:
logger.info(
f"Building custom components from {settings_manager.settings.COMPONENTS_PATH}"

View file

@ -83,6 +83,7 @@ def update_flow(
flow_id: UUID,
flow: FlowUpdate,
current_user: User = Depends(get_current_active_user),
settings_manager=Depends(get_settings_manager),
):
"""Update a flow."""
@ -90,7 +91,6 @@ def update_flow(
if not db_flow:
raise HTTPException(status_code=404, detail="Flow not found")
flow_data = flow.dict(exclude_unset=True)
settings_manager = get_settings_manager()
if settings_manager.settings.REMOVE_API_KEYS:
flow_data = remove_api_keys(flow_data)
for key, value in flow_data.items():

View file

@ -34,9 +34,9 @@ async def login_to_get_access_token(
@router.get("/auto_login")
async def auto_login(db: Session = Depends(get_session)):
settings_manager = get_settings_manager()
async def auto_login(
db: Session = Depends(get_session), settings_manager=Depends(get_settings_manager)
):
if settings_manager.auth_settings.AUTO_LOGIN:
return create_user_longterm_token(db)

View file

@ -29,7 +29,7 @@ router = APIRouter(tags=["Users"])
@router.post("/user", response_model=UserRead, status_code=201)
def add_user(
user: UserCreate,
db: Session = Depends(get_session),
session: Session = Depends(get_session),
) -> User:
"""
Add a new user to the database.
@ -38,11 +38,11 @@ def add_user(
try:
new_user.password = get_password_hash(user.password)
db.add(new_user)
db.commit()
db.refresh(new_user)
session.add(new_user)
session.commit()
session.refresh(new_user)
except IntegrityError as e:
db.rollback()
session.rollback()
raise HTTPException(
status_code=400, detail="This username is unavailable."
) from e
@ -65,16 +65,16 @@ def read_all_users(
skip: int = 0,
limit: int = 10,
current_user: Session = Depends(get_current_active_superuser),
db: Session = Depends(get_session),
session: Session = Depends(get_session),
) -> UsersResponse:
"""
Retrieve a list of users from the database with pagination.
"""
query = select(User).offset(skip).limit(limit)
users = db.execute(query).fetchall()
users = session.execute(query).fetchall()
count_query = select(func.count()).select_from(User) # type: ignore
total_count = db.execute(count_query).scalar()
total_count = session.execute(count_query).scalar()
return UsersResponse(
total_count=total_count, # type: ignore
@ -87,19 +87,19 @@ def patch_user(
user_id: UUID,
user: UserUpdate,
_: Session = Depends(get_current_active_user),
db: Session = Depends(get_session),
session: Session = Depends(get_session),
) -> User:
"""
Update an existing user's data.
"""
return update_user(user_id, user, db)
return update_user(user_id, user, session)
@router.delete("/user/{user_id}")
def delete_user(
user_id: UUID,
current_user: User = Depends(get_current_active_superuser),
db: Session = Depends(get_session),
session: Session = Depends(get_session),
) -> dict:
"""
Delete a user from the database.
@ -113,12 +113,12 @@ def delete_user(
status_code=403, detail="You don't have the permission to delete this user"
)
user_db = db.query(User).filter(User.id == user_id).first()
user_db = session.query(User).filter(User.id == user_id).first()
if not user_db:
raise HTTPException(status_code=404, detail="User not found")
db.delete(user_db)
db.commit()
session.delete(user_db)
session.commit()
return {"detail": "User deleted"}
@ -126,7 +126,7 @@ def delete_user(
# TODO: REMOVE - Just for testing purposes
@router.post("/super_user", response_model=User)
def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev(
db: Session = Depends(get_session),
session: Session = Depends(get_session),
) -> User:
"""
Add a superuser for testing purposes.
@ -141,11 +141,11 @@ def add_super_user_for_testing_purposes_delete_me_before_merge_into_dev(
)
try:
db.add(new_user)
db.commit()
db.refresh(new_user)
session.add(new_user)
session.commit()
session.refresh(new_user)
except IntegrityError as e:
db.rollback()
session.rollback()
raise HTTPException(status_code=400, detail="User exists") from e
return new_user

View file

@ -8,7 +8,7 @@ from langflow.api.v1.base import (
validate_prompt,
)
from langflow.template.field.base import TemplateField
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.validate import validate_code
# build router
@ -35,7 +35,7 @@ def post_validate_prompt(prompt_request: ValidatePromptRequest):
if prompt_request.frontend_node is None:
return PromptValidationResponse(
input_variables=input_variables,
frontend_node={},
frontend_node=None,
)
old_custom_fields = get_old_custom_fields(prompt_request)

View file

@ -1,4 +1,4 @@
from langflow.utils.logger import logger
from loguru import logger
from typing import TYPE_CHECKING
if TYPE_CHECKING:

View file

@ -10,7 +10,7 @@ from langflow.graph.vertex.types import (
)
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.utils import payload
from langflow.utils.logger import logger
from loguru import logger
from langchain.chains.base import Chain

View file

@ -3,7 +3,7 @@ from langflow.graph.utils import UnbuiltObject
from langflow.interface.initialize import loading
from langflow.interface.listing import lazy_load_dict
from langflow.utils.constants import DIRECT_TYPES
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import sync_to_async

View file

@ -8,7 +8,7 @@ from langflow.interface.base import LangChainTypeCreator
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.agents import AgentFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

View file

@ -8,7 +8,7 @@ from pydantic import BaseModel
from langflow.template.field.base import TemplateField
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.template.base import Template
from langflow.utils.logger import logger
from loguru import logger
# Assuming necessary imports for Field, Template, and FrontendNode classes

View file

@ -6,7 +6,7 @@ from langflow.interface.importing.utils import import_class
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.chains import ChainFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
from langchain import chains
from langchain_experimental.sql import SQLDatabaseChain # type: ignore

View file

@ -8,7 +8,7 @@ from langflow.interface.custom.custom_component import CustomComponent
from langflow.template.frontend_node.custom_components import (
CustomComponentFrontendNode,
)
from langflow.utils.logger import logger
from loguru import logger
# Assuming necessary imports for Field, Template, and FrontendNode classes

View file

@ -1,7 +1,7 @@
import os
import ast
import zlib
from langflow.utils.logger import logger
from loguru import logger
class CustomComponentPathValueError(ValueError):

View file

@ -5,7 +5,7 @@ from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode
from langflow.interface.custom_lists import documentloaders_type_to_cls_dict
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -6,7 +6,7 @@ from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -34,7 +34,7 @@ from langflow.utils import validate
from langchain.chains.base import Chain
from langchain.vectorstores.base import VectorStore
from langchain.document_loaders.base import BaseLoader
from langflow.utils.logger import logger
from loguru import logger
if TYPE_CHECKING:
from langflow import CustomComponent

View file

@ -5,7 +5,7 @@ from langflow.interface.custom_lists import llm_type_to_cls_dict
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.llms import LLMFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -6,7 +6,7 @@ from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.base import FrontendNode
from langflow.template.frontend_node.memories import MemoryFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method
from langflow.custom.customs import get_custom_nodes

View file

@ -7,7 +7,7 @@ from langflow.interface.importing.utils import import_class
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

View file

@ -8,7 +8,7 @@ from langflow.interface.importing.utils import import_class
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.prompts import PromptFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -7,7 +7,7 @@ from langflow.interface.importing.utils import import_class
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.retrievers import RetrieverFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_method, build_template_from_class

View file

@ -1,8 +1,8 @@
from typing import Any, Dict, Tuple
from langflow.services.cache.utils import Memoize
from langflow.graph import Graph
from langflow.utils.logger import logger
from langflow.services.utils import get_cache_manager
from loguru import logger
@Memoize(get_cache_manager=get_cache_manager)

View file

@ -5,7 +5,7 @@ from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode
from langflow.interface.custom_lists import textsplitter_type_to_cls_dict
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -6,7 +6,7 @@ from langflow.interface.base import LangChainTypeCreator
from langflow.interface.importing.utils import import_class, import_module
from langflow.services.utils import get_settings_manager
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -3,7 +3,7 @@ import inspect
from typing import Dict, Union
from langchain.agents.tools import Tool
from langflow.utils.logger import logger
from loguru import logger
def get_func_tool_params(func, **kwargs) -> Union[Dict, None]:

View file

@ -29,7 +29,7 @@ from langflow.template.frontend_node.custom_components import (
from langflow.interface.retrievers.base import retriever_creator
from langflow.interface.custom.directory_reader import DirectoryReader
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import get_base_classes
import re

View file

@ -8,7 +8,7 @@ from langflow.interface.importing.utils import import_class
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class

View file

@ -8,7 +8,7 @@ import re
import yaml
from langchain.base_language import BaseLanguageModel
from PIL.Image import Image
from langflow.utils.logger import logger
from loguru import logger
from langflow.services.chat.config import ChatConfig
from langflow.services.utils import get_settings_manager

View file

@ -7,7 +7,7 @@ from langflow.interface.importing.utils import import_class
from langflow.services.utils import get_settings_manager
from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_method

View file

@ -3,7 +3,7 @@ from typing import Dict, List, Optional
from langchain import requests, sql_database
from langflow.interface.base import LangChainTypeCreator
from langflow.utils.logger import logger
from loguru import logger
from langflow.utils.util import build_template_from_class, build_template_from_method

View file

@ -4,7 +4,7 @@ from langflow.api.v1.callback import (
StreamingLLMCallbackHandler,
)
from langflow.processing.process import fix_memory_inputs, format_actions
from langflow.utils.logger import logger
from loguru import logger
from langchain.agents.agent import AgentExecutor

View file

@ -8,7 +8,7 @@ from langflow.interface.run import (
update_memory_keys,
)
from langflow.services.utils import get_session_manager
from langflow.utils.logger import logger
from loguru import logger
from langflow.graph import Graph
from langchain.chains.base import Chain
from langchain.vectorstores.base import VectorStore

View file

@ -4,8 +4,7 @@ from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse
from langflow.services.base import Service
from langflow.services.chat.cache import Subject
from langflow.services.chat.utils import process_graph
from langflow.interface.utils import pil_to_base64
from langflow.utils.logger import logger
from loguru import logger
from .cache import cache_manager
import asyncio

View file

@ -2,7 +2,7 @@ from fastapi import WebSocket
from langflow.api.v1.schemas import ChatMessage
from langflow.processing.base import get_result_and_steps
from langflow.interface.utils import try_setting_streaming_options
from langflow.utils.logger import logger
from loguru import logger
async def process_graph(

View file

@ -7,7 +7,7 @@ from langflow.services.utils import get_settings_manager
from sqlalchemy import inspect
import sqlalchemy as sa
from sqlmodel import SQLModel, Session, create_engine
from langflow.utils.logger import logger
from loguru import logger
from alembic.config import Config
from alembic import command
from langflow.services.database import models # noqa
@ -89,7 +89,7 @@ class DatabaseManager(Service):
for table in legacy_tables:
if table in inspector.get_table_names():
logger.warn(f"Legacy table exists: {table}")
logger.warning(f"Legacy table exists: {table}")
return True

View file

@ -1,6 +1,6 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING
from langflow.utils.logger import logger
from loguru import logger
from contextlib import contextmanager
from alembic.util.exc import CommandError
from sqlmodel import Session

View file

@ -1,6 +1,6 @@
from langflow.services.schema import ServiceType
from typing import TYPE_CHECKING, Dict, List, Optional
from langflow.utils.logger import logger
from typing import TYPE_CHECKING, List, Optional
from loguru import logger
if TYPE_CHECKING:
from langflow.services.factory import ServiceFactory

View file

@ -5,7 +5,7 @@ from langflow.services.settings.utils import read_secret_from_file, write_secret
from pydantic import BaseSettings, Field, validator
from passlib.context import CryptContext
from langflow.utils.logger import logger
from loguru import logger
class AuthSettings(BaseSettings):

View file

@ -8,7 +8,7 @@ from pathlib import Path
import yaml
from pydantic import BaseSettings, root_validator, validator
from langflow.utils.logger import logger
from loguru import logger
# BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components")
BASE_COMPONENTS_PATH = str(Path(__file__).parent.parent.parent / "components")

View file

@ -1,7 +1,7 @@
from langflow.services.base import Service
from langflow.services.settings.auth import AuthSettings
from langflow.services.settings.base import Settings
from langflow.utils.logger import logger
from loguru import logger
import os
import yaml

View file

@ -2,7 +2,7 @@ import os
from pathlib import Path
import platform
from langflow.utils.logger import logger
from loguru import logger
def set_secure_permissions(file_path):

View file

@ -1,5 +1,5 @@
from langflow.services import ServiceType, service_manager
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Generator
if TYPE_CHECKING:
@ -8,6 +8,7 @@ if TYPE_CHECKING:
from langflow.services.cache.manager import BaseCacheManager
from langflow.services.session.manager import SessionManager
from langflow.services.task.manager import TaskManager
from langflow.services.chat.manager import ChatManager
from sqlmodel import Session
@ -19,7 +20,7 @@ def get_db_manager() -> "DatabaseManager":
return service_manager.get(ServiceType.DATABASE_MANAGER)
def get_session() -> "Session":
def get_session() -> Generator["Session", None, None]:
db_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
yield from db_manager.get_session()
@ -34,3 +35,7 @@ def get_session_manager() -> "SessionManager":
def get_task_manager() -> "TaskManager":
return service_manager.get(ServiceType.TASK_MANAGER)
def get_chat_manager() -> "ChatManager":
return service_manager.get(ServiceType.CHAT_MANAGER)

View file

@ -1,30 +1,35 @@
import logging
from typing import Optional
from loguru import logger
from pathlib import Path
from rich.logging import RichHandler
logger = logging.getLogger("langflow")
def configure(log_level: str = "DEBUG", log_file: Optional[Path] = None):
log_format = "<green>{time:HH:mm:ss}</green> - <level>{level: <8}</level> - <level>{message}</level>"
logger.remove() # Remove default handlers
def configure(log_level: str = "DEBUG", log_file: Path = None): # type: ignore
log_format = "%(asctime)s - %(levelname)s - %(message)s"
log_level_value = getattr(logging, log_level.upper(), logging.INFO)
logging.basicConfig(
level=log_level_value,
format=log_format,
datefmt="[%X]",
handlers=[RichHandler(rich_tracebacks=True)],
# Configure loguru to use RichHandler
logger.configure(
handlers=[
{
"sink": RichHandler(rich_tracebacks=True, markup=True),
"format": log_format,
"level": log_level.upper(),
}
]
)
if log_file:
log_file = Path(log_file)
log_file.parent.mkdir(parents=True, exist_ok=True)
file_handler = logging.FileHandler(log_file)
file_handler.setFormatter(logging.Formatter(log_format))
logger.addHandler(file_handler)
logger.add(
sink=str(log_file),
level=log_level.upper(),
format=log_format,
rotation="10 MB", # Log rotation based on file size
)
logger.info(f"Logger set up with log level: {log_level_value}({log_level})")
logger.info(f"Logger set up with log level: {log_level}")
if log_file:
logger.info(f"Log file: {log_file}")

View file

@ -39,9 +39,7 @@ export default function DropdownButton({
}}
>
{!showOptions ? (
<IconComponent
name="ChevronDown"
/>
<IconComponent name="ChevronDown" />
) : (
<IconComponent name="ChevronUp" />
)}

View file

@ -54,7 +54,7 @@ export default function InputComponent({
</Form.Control>
) : (
<Input
type={password && !pwdVisible ? "password" : "text"}
type="text"
value={value}
disabled={disabled}
required={required}

View file

@ -315,9 +315,11 @@ export function TabsProvider({ children }: { children: ReactNode }) {
input.type = "file";
input.accept = ".json";
// add a change event listener to the file input
id = await new Promise(resolve => {
id = await new Promise((resolve) => {
input.onchange = async (e: Event) => {
if ((e.target as HTMLInputElement).files![0].type === "application/json") {
if (
(e.target as HTMLInputElement).files![0].type === "application/json"
) {
const currentfile = (e.target as HTMLInputElement).files![0];
let text = await currentfile.text();
let flow: FlowType = JSON.parse(text);

View file

@ -164,7 +164,7 @@ export async function readFlowsFromDatabase() {
try {
const response = await api.get(`${BASE_URL_API}flows/`);
if (response?.status !== 200) {
throw new Error(`HTTP error! status: ${response.status}`);
throw new Error(`HTTP error! status: ${response?.status}`);
}
return response.data;
} catch (error) {
@ -177,7 +177,7 @@ export async function downloadFlowsFromDatabase() {
try {
const response = await api.get(`${BASE_URL_API}flows/download/`);
if (response?.status !== 200) {
throw new Error(`HTTP error! status: ${response.status}`);
throw new Error(`HTTP error! status: ${response?.status}`);
}
return response.data;
} catch (error) {
@ -190,8 +190,8 @@ export async function uploadFlowsToDatabase(flows: FormData) {
try {
const response = await api.post(`${BASE_URL_API}flows/upload/`, flows);
if (response.status !== 201) {
throw new Error(`HTTP error! status: ${response.status}`);
if (response?.status !== 201) {
throw new Error(`HTTP error! status: ${response?.status}`);
}
return response.data;
} catch (error) {
@ -468,7 +468,7 @@ export async function updateUser(user_id: string, user: Users) {
export async function getApiKey() {
try {
const res = await api.get(`${BASE_URL_API}api_key`);
const res = await api.get(`${BASE_URL_API}api_key/`);
if (res.status === 200) {
return res.data;
}
@ -480,7 +480,7 @@ export async function getApiKey() {
export async function createApiKey(name: string) {
try {
const res = await api.post(`${BASE_URL_API}api_key`, { name });
const res = await api.post(`${BASE_URL_API}api_key/`, { name });
if (res.status === 200) {
return res.data;
}

View file

@ -32,8 +32,8 @@ export default function LoginPage(): JSX.Element {
function signIn() {
const user: LoginType = {
username: username,
password: password,
username: username.trim(),
password: password.trim(),
};
onLogin(user)
.then((user) => {

View file

@ -33,8 +33,8 @@ export default function SignUp(): JSX.Element {
function handleSignup(): void {
const { username, password } = inputState;
const newUser: UserInputType = {
username,
password,
username: username.trim(),
password: password.trim(),
};
addUser(newUser)
.then((user) => {

View file

@ -548,5 +548,5 @@ export type fetchErrorComponentType = {
export type dropdownButtonPropsType = {
firstButtonName: string;
onFirstBtnClick: () => void;
options: Array<{ name: string; onBtnClick: () => void; }>;
options: Array<{ name: string; onBtnClick: () => void }>;
};