Merge branch 'feature/store' into bug/undo-copy

This commit is contained in:
Lucas Oliveira 2023-11-28 19:51:53 -03:00
commit 082ea5282f
37 changed files with 421 additions and 326 deletions

View file

@ -23,7 +23,7 @@ ENV PYTHONUNBUFFERED=1 \
\
# poetry
# https://python-poetry.org/docs/configuration/#using-environment-variables
POETRY_VERSION=1.7 \
POETRY_VERSION=1.7.1 \
# make poetry install to this location
POETRY_HOME="/opt/poetry" \
# make poetry create the virtual environment in the project's root

View file

@ -146,7 +146,7 @@ services:
build:
context: ../
dockerfile: base.Dockerfile
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h
command: celery -A langflow.worker.celery_app worker --loglevel=DEBUG --concurrency=1 -n lf-worker@%h
healthcheck:
test: "exit 0"
deploy:

View file

@ -134,8 +134,8 @@ services:
image: redis:6.2.5
env_file:
- .env
# ports:
# - 6379:6379
ports:
- 6379:6379
healthcheck:
test: "exit 0"
@ -146,7 +146,7 @@ services:
build:
context: ../
dockerfile: base.Dockerfile
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h
command: celery -A langflow.worker.celery_app worker --loglevel=INFO --concurrency=1 -n lf-worker@%h -P eventlet
healthcheck:
test: "exit 0"
deploy:

251
poetry.lock generated
View file

@ -337,32 +337,27 @@ six = ">=1.4.0"
[[package]]
name = "bcrypt"
version = "4.0.1"
version = "4.1.1"
description = "Modern password hashing for your software and your servers"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"
files = [
{file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},
{file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"},
{file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"},
{file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"},
{file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"},
{file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"},
{file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"},
{file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"},
{file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"},
{file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"},
{file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"},
{file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"},
{file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"},
{file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"},
{file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"},
{file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"},
{file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"},
{file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"},
{file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"},
{file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"},
{file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
{file = "bcrypt-4.1.1-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:2e197534c884336f9020c1f3a8efbaab0aa96fc798068cb2da9c671818b7fbb0"},
{file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d573885b637815a7f3a3cd5f87724d7d0822da64b0ab0aa7f7c78bae534e86dc"},
{file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab33473f973e8058d1b2df8d6e095d237c49fbf7a02b527541a86a5d1dc4444"},
{file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fb931cd004a7ad36a89789caf18a54c20287ec1cd62161265344b9c4554fdb2e"},
{file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:12f40f78dcba4aa7d1354d35acf45fae9488862a4fb695c7eeda5ace6aae273f"},
{file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2ade10e8613a3b8446214846d3ddbd56cfe9205a7d64742f0b75458c868f7492"},
{file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f33b385c3e80b5a26b3a5e148e6165f873c1c202423570fdf45fe34e00e5f3e5"},
{file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:755b9d27abcab678e0b8fb4d0abdebeea1f68dd1183b3f518bad8d31fa77d8be"},
{file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a7a7b8a87e51e5e8ca85b9fdaf3a5dc7aaf123365a09be7a27883d54b9a0c403"},
{file = "bcrypt-4.1.1-cp37-abi3-win32.whl", hash = "sha256:3d6c4e0d6963c52f8142cdea428e875042e7ce8c84812d8e5507bd1e42534e07"},
{file = "bcrypt-4.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:14d41933510717f98aac63378b7956bbe548986e435df173c841d7f2bd0b2de7"},
{file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24c2ebd287b5b11016f31d506ca1052d068c3f9dc817160628504690376ff050"},
{file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:476aa8e8aca554260159d4c7a97d6be529c8e177dbc1d443cb6b471e24e82c74"},
{file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12611c4b0a8b1c461646228344784a1089bc0c49975680a2f54f516e71e9b79e"},
{file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6450538a0fc32fb7ce4c6d511448c54c4ff7640b2ed81badf9898dcb9e5b737"},
{file = "bcrypt-4.1.1.tar.gz", hash = "sha256:df37f5418d4f1cdcff845f60e747a015389fa4e63703c918330865e06ad80007"},
]
[package.extras]
@ -411,17 +406,17 @@ files = [
[[package]]
name = "boto3"
version = "1.29.7"
version = "1.33.2"
description = "The AWS SDK for Python"
optional = false
python-versions = ">= 3.7"
files = [
{file = "boto3-1.29.7-py3-none-any.whl", hash = "sha256:96e9890ebe7cd823b5f4976dd676e112c000c6528c28e20a2f274590589dd18b"},
{file = "boto3-1.29.7.tar.gz", hash = "sha256:1eb4c548118b5fc5e018dee956fd33e6fb249cd1f2def85f1bba816aef4d9f3e"},
{file = "boto3-1.33.2-py3-none-any.whl", hash = "sha256:fc7c0dd5fa74ae0d57e11747695bdba4ad164e62dee35db15b43762c392fbd92"},
{file = "boto3-1.33.2.tar.gz", hash = "sha256:70626598dd6698d6da8f2854a1ae5010f175572e2a465b2aa86685c745c1013c"},
]
[package.dependencies]
botocore = ">=1.32.7,<1.33.0"
botocore = ">=1.33.2,<1.34.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.8.0,<0.9.0"
@ -430,13 +425,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.32.7"
version = "1.33.2"
description = "Low-level, data-driven core of boto 3."
optional = false
python-versions = ">= 3.7"
files = [
{file = "botocore-1.32.7-py3-none-any.whl", hash = "sha256:58b33d02cafa23461c8a9d211b30e8cded992380a84de409379fd02811fa3e11"},
{file = "botocore-1.32.7.tar.gz", hash = "sha256:c6795c731b04c8e3635588c44cfd1a4462fc5987859195522c96812cf3eceff9"},
{file = "botocore-1.33.2-py3-none-any.whl", hash = "sha256:5c46b7e8450efbf7ddc2a0016eee7225a5564583122e25a20ca92a29a105225c"},
{file = "botocore-1.33.2.tar.gz", hash = "sha256:16a30faac6e6f17961c009defb74ab1a3508b8abc58fab98e7cf96af0d91ea84"},
]
[package.dependencies]
@ -926,19 +921,19 @@ testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"]
[[package]]
name = "cohere"
version = "4.36"
description = ""
version = "4.37"
description = "Python SDK for the Cohere API"
optional = false
python-versions = ">=3.7,<4.0"
python-versions = ">=3.8,<4.0"
files = [
{file = "cohere-4.36-py3-none-any.whl", hash = "sha256:4ebafbf2f340c90cf81599d94da1678fb4cd22548cd7fc0f6b16420090ffa705"},
{file = "cohere-4.36.tar.gz", hash = "sha256:3dd67abe2d12da55132a85dfbbf2963e50e3e60eaee9b07ff9457ae2766f1f8d"},
{file = "cohere-4.37-py3-none-any.whl", hash = "sha256:f3fad3a0f8d86761d4de851dfd2233a1e5c7634a024102212d850bde9c9bb031"},
{file = "cohere-4.37.tar.gz", hash = "sha256:788021d9d992c6c31d1985d95cccb277c7265882c4acd7a49b3e47da77b4bec8"},
]
[package.dependencies]
aiohttp = ">=3.0,<4.0"
backoff = ">=2.0,<3.0"
fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""}
fastavro = ">=1.8,<2.0"
importlib_metadata = ">=6.0,<7.0"
requests = ">=2.25.0,<3.0.0"
urllib3 = ">=1.26,<3"
@ -1110,34 +1105,34 @@ toml = ["tomli"]
[[package]]
name = "cryptography"
version = "41.0.5"
version = "41.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
{file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"},
{file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"},
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"},
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"},
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"},
{file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"},
{file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"},
{file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"},
{file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"},
{file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"},
{file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"},
{file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"},
{file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"},
{file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"},
{file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"},
{file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"},
{file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"},
{file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"},
{file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"},
{file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"},
{file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"},
{file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"},
{file = "cryptography-41.0.5.tar.gz", hash = "sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"},
{file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf"},
{file = "cryptography-41.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d"},
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a"},
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15"},
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a"},
{file = "cryptography-41.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1"},
{file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157"},
{file = "cryptography-41.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406"},
{file = "cryptography-41.0.7-cp37-abi3-win32.whl", hash = "sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d"},
{file = "cryptography-41.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2"},
{file = "cryptography-41.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960"},
{file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003"},
{file = "cryptography-41.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7"},
{file = "cryptography-41.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec"},
{file = "cryptography-41.0.7-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be"},
{file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a"},
{file = "cryptography-41.0.7-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c"},
{file = "cryptography-41.0.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a"},
{file = "cryptography-41.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39"},
{file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a"},
{file = "cryptography-41.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248"},
{file = "cryptography-41.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309"},
{file = "cryptography-41.0.7.tar.gz", hash = "sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc"},
]
[package.dependencies]
@ -1175,13 +1170,13 @@ tests = ["pytest"]
[[package]]
name = "dataclasses-json"
version = "0.6.2"
version = "0.6.3"
description = "Easily serialize dataclasses to and from JSON."
optional = false
python-versions = ">=3.7,<4.0"
files = [
{file = "dataclasses_json-0.6.2-py3-none-any.whl", hash = "sha256:71816ced3d0f55a2c5bc1a813ace1b8d4234e79a08744269a7cf84d6f7c06e99"},
{file = "dataclasses_json-0.6.2.tar.gz", hash = "sha256:1b934c1bd63e775880946b8361a902d7de86e894bab8098eab27c010f95724d1"},
{file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"},
{file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"},
]
[package.dependencies]
@ -1599,42 +1594,48 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)"
[[package]]
name = "fastavro"
version = "1.8.2"
version = "1.9.0"
description = "Fast read/write of AVRO files"
optional = false
python-versions = ">=3.8"
files = [
{file = "fastavro-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0e08964b2e9a455d831f2557402a683d4c4d45206f2ab9ade7c69d3dc14e0e58"},
{file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401a70b1e5c7161420c6019e0c8afa88f7c8a373468591f5ec37639a903c2509"},
{file = "fastavro-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef1ed3eaa4240c05698d02d8d0c010b9a03780eda37b492da6cd4c9d37e04ec"},
{file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:543185a672ff6306beb329b57a7b8a3a2dd1eb21a5ccc530150623d58d48bb98"},
{file = "fastavro-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ffbf8bae1edb50fe7beeffc3afa8e684686550c2e5d31bf01c25cfa213f581e1"},
{file = "fastavro-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:bb545eb9d876bc7b785e27e98e7720ada7eee7d7a1729798d2ed51517f13500a"},
{file = "fastavro-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b837d3038c651046252bc92c1b9899bf21c7927a148a1ff89599c36c2a331ca"},
{file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3510e96c0a47e4e914bd1a29c954eb662bfa24849ad92e597cb97cc79f21af7"},
{file = "fastavro-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccc0e74f2c2ab357f39bb73d67fcdb6dc10e23fdbbd399326139f72ec0fb99a3"},
{file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:add51c70d0ab1175601c75cd687bbe9d16ae312cd8899b907aafe0d79ee2bc1d"},
{file = "fastavro-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d9e2662f57e6453e9a2c9fb4f54b2a9e62e3e46f5a412ac00558112336d23883"},
{file = "fastavro-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:fea75cf53a93c56dd56e68abce8d314ef877b27451c870cd7ede7582d34c08a7"},
{file = "fastavro-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f489020bb8664c2737c03457ad5dbd490579ddab6f0a7b5c17fecfe982715a89"},
{file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a547625c138efd5e61300119241041906ee8cb426fc7aa789900f87af7ed330d"},
{file = "fastavro-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53beb458f30c9ad4aa7bff4a42243ff990ffb713b6ce0cd9b360cbc3d648fe52"},
{file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7b1b2cbd2dd851452306beed0ab9bdaeeab1cc8ad46f84b47cd81eeaff6dd6b8"},
{file = "fastavro-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d29e9baee0b2f37ecd09bde3b487cf900431fd548c85be3e4fe1b9a0b2a917f1"},
{file = "fastavro-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:66e132c710663230292bc63e2cb79cf95b16ccb94a5fc99bb63694b24e312fc5"},
{file = "fastavro-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:38aca63ce604039bcdf2edd14912d00287bdbf8b76f9aa42b28e6ca0bf950092"},
{file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9787835f6449ee94713e7993a700432fce3763024791ffa8a58dc91ef9d1f950"},
{file = "fastavro-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:536cb448bc83811056be02749fd9df37a69621678f02597d272970a769e9b40c"},
{file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e9d5027cf7d9968f8f819958b41bfedb933323ea6d6a0485eefacaa1afd91f54"},
{file = "fastavro-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:792adfc0c80c7f1109e0ab4b0decef20691fdf0a45091d397a0563872eb56d42"},
{file = "fastavro-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:650b22766259f7dd7519dfa4e4658f0e233c319efa130b9cf0c36a500e09cc57"},
{file = "fastavro-1.8.2.tar.gz", hash = "sha256:ab9d9226d4b66b6b3d0661a57cd45259b0868fed1c0cd4fac95249b9e0973320"},
{file = "fastavro-1.9.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:00826f295f290ba95f1f68d5c36970b4db7f9245a1b1a33dd9d464a382733894"},
{file = "fastavro-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ff7ac97cfe07ad90fdcca3ea90b14461ba8831bc45f02e13440b6c634f291c8"},
{file = "fastavro-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c251e7122b436458b8e1151c0613d6dac2b5edb6acbbc35de3b4c5f6ebb80b7"},
{file = "fastavro-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:35a32f5d33f91fcb7e8daf7afc82a75c8d7c774cf4d93937b2ad487d28f3f707"},
{file = "fastavro-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:228e7c525ff15a9f21f1adb2097ec87888933ef5c8a682c2f1d5d83796e4dd42"},
{file = "fastavro-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d694bb1c2b20f1703bcb698a74f58f0f503eda8f49cb6d46209c8f3715098348"},
{file = "fastavro-1.9.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f044b71d8b0ba6bbd6166be6836c3caeadd26eeaabee70b6ac7c6a9b884f6bf"},
{file = "fastavro-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172d6d5c186ba51ec6eaa98eaaadc8e859b5a56862ae724413424a858619da7f"},
{file = "fastavro-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07dee19dcc2797a8cb1b410d9e65febb55af2a18d9a7b85465b039d4276b9a29"},
{file = "fastavro-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:83402b450f718b690ebd88f1df2ea70609f1192bed1498308d29ac737e992391"},
{file = "fastavro-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3704847d79377a5b4252ccf6d3a391497cdb8f57017cde2613f92f5274d6261"},
{file = "fastavro-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:602492ea0c458020cd19138ff2b9e97aa187ae01c290183dd9bbb7ff2d2e83c4"},
{file = "fastavro-1.9.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cea6c2508dfb06d65cddb5b90bd6a79d3e481f1d80adc5f6ce6e3dacb4a8773"},
{file = "fastavro-1.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8629d4367373db7d195672834c59c86e2642172bbebd5ec6d83797b39ac4ef01"},
{file = "fastavro-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f45dfc29de276b509c8dbbfa6076ba6562be055c877928d4ffa1cf35b8ec59dc"},
{file = "fastavro-1.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:cc3b2de071e4d6de19974ffd328e63f7c85de2348d614222238fda2b35578b63"},
{file = "fastavro-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0d2570052b4e2d7b46bec4cd74c8b12d8e21cd151f5bfc837da990cb62385c5"},
{file = "fastavro-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:718e5df505029269e7a80afdd7e5f196d24f1473ad47eea41061ce630609f80e"},
{file = "fastavro-1.9.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:6cebcc09c932931e3084c96fe2c666c9cfc8c4043520651fbfeb58575edeb7da"},
{file = "fastavro-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb7e3a058a169d2c8bd19dfcbc7ae14c879750ce49fbaf3c436af683991f7eae"},
{file = "fastavro-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5af71895a01618c98ae7c563ee75b18f721d8a66324d66613bd2fcd8b2f8ac9"},
{file = "fastavro-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:db30121ce34f5a0a4c368504a5e2df05449382e8d4918c0b43058ffb1d31d723"},
{file = "fastavro-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:48d9214982c0c0f29e583df11781dc6884e8f3f3336b97991c6e7587f509a02b"},
{file = "fastavro-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d4a71d39760de455dbe0b2121ea1bbd85fc851e8bab2970d9e9d6d8825277d2"},
{file = "fastavro-1.9.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:f803c33f4fd4e3bfc17bbdbf3c036fbcb92a1f8e6bd19a035800518479ce6b36"},
{file = "fastavro-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00361ea6d5a46813f3758511153fed9698308cae175500ff62562893d3570156"},
{file = "fastavro-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44fc998387271d57d0e3b29c30049ba903d2aead9471b12c20725284d60dd57e"},
{file = "fastavro-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52e7df50431c21543682afd0ca95c40569c49e4c4599dcb78343f7c24fda6145"},
{file = "fastavro-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:215f40921d3f1f229cea89af25533e7be3fde16dd85c55436c15fb1ad067b486"},
{file = "fastavro-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:0c046ed9759d1100df59dc18452901253cff5a37d9e8e8701d0102116c3202cb"},
{file = "fastavro-1.9.0.tar.gz", hash = "sha256:71aad82b17442dc41223f8351b9f28a60dd877a8e5a7525eaf6342f45f6d23e1"},
]
[package.extras]
codecs = ["lz4", "python-snappy", "zstandard"]
codecs = ["cramjam", "lz4", "zstandard"]
lz4 = ["lz4"]
snappy = ["python-snappy"]
snappy = ["cramjam"]
zstandard = ["zstandard"]
[[package]]
@ -3002,13 +3003,13 @@ files = [
[[package]]
name = "ipykernel"
version = "6.26.0"
version = "6.27.1"
description = "IPython Kernel for Jupyter"
optional = false
python-versions = ">=3.8"
files = [
{file = "ipykernel-6.26.0-py3-none-any.whl", hash = "sha256:3ba3dc97424b87b31bb46586b5167b3161b32d7820b9201a9e698c71e271602c"},
{file = "ipykernel-6.26.0.tar.gz", hash = "sha256:553856658eb8430bbe9653ea041a41bff63e9606fc4628873fc92a6cf3abd404"},
{file = "ipykernel-6.27.1-py3-none-any.whl", hash = "sha256:dab88b47f112f9f7df62236511023c9bdeef67abc73af7c652e4ce4441601686"},
{file = "ipykernel-6.27.1.tar.gz", hash = "sha256:7d5d594b6690654b4d299edba5e872dc17bb7396a8d0609c97cb7b8a1c605de6"},
]
[package.dependencies]
@ -3524,13 +3525,13 @@ zookeeper = ["kazoo (>=2.8.0)"]
[[package]]
name = "langchain"
version = "0.0.340"
version = "0.0.341"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain-0.0.340-py3-none-any.whl", hash = "sha256:f80f40b52ef82424e38e894db8b8048b6505da100679e72613316f8d8b0243fb"},
{file = "langchain-0.0.340.tar.gz", hash = "sha256:1a6bd2511bbb81e42d2a3d7291ee03de180accab851181ee9fdbb7fbaef6c57c"},
{file = "langchain-0.0.341-py3-none-any.whl", hash = "sha256:7836c5b8a49e90a33b024696016c149cd8dae2cce1682dc0c41d2f4f89b14525"},
{file = "langchain-0.0.341.tar.gz", hash = "sha256:98c86527054df983464d139fff2ea906af7b28712889c83c330c7b5e8aeb9d41"},
]
[package.dependencies]
@ -3539,6 +3540,7 @@ anyio = "<4.0"
async-timeout = {version = ">=4.0.0,<5.0.0", markers = "python_version < \"3.11\""}
dataclasses-json = ">=0.5.7,<0.7"
jsonpatch = ">=1.33,<2.0"
langchain-core = ">=0.0.6,<0.0.7"
langsmith = ">=0.0.63,<0.1.0"
numpy = ">=1,<2"
pydantic = ">=1,<3"
@ -3555,13 +3557,30 @@ cli = ["typer (>=0.9.0,<0.10.0)"]
cohere = ["cohere (>=4,<5)"]
docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"]
embeddings = ["sentence-transformers (>=2,<3)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "dashvector (>=1.0.1,<2.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.6.0,<0.7.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "dashvector (>=1.0.1,<2.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.6.0,<0.7.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)"]
javascript = ["esprima (>=4.0.1,<5.0.0)"]
llms = ["clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (<2)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)"]
openai = ["openai (<2)", "tiktoken (>=0.3.2,<0.6.0)"]
qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"]
text-helpers = ["chardet (>=5.1.0,<6.0.0)"]
[[package]]
name = "langchain-core"
version = "0.0.6"
description = "Building applications with LLMs through composability"
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langchain_core-0.0.6-py3-none-any.whl", hash = "sha256:dcc727ff811159e09fc1d72caae4aaea892611349d5c3fc1c18b3a19573faf27"},
{file = "langchain_core-0.0.6.tar.gz", hash = "sha256:cffd1031764d838ad2a2f3f64477b710923ddad58eb9fe3130ff94b3669e8dd8"},
]
[package.dependencies]
jsonpatch = ">=1.33,<2.0"
langsmith = ">=0.0.63,<0.1.0"
pydantic = ">=1,<3"
tenacity = ">=8.1.0,<9.0.0"
[[package]]
name = "langchain-experimental"
version = "0.0.42"
@ -3618,13 +3637,13 @@ wrapt = "1.14"
[[package]]
name = "langsmith"
version = "0.0.66"
version = "0.0.67"
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false
python-versions = ">=3.8.1,<4.0"
files = [
{file = "langsmith-0.0.66-py3-none-any.whl", hash = "sha256:e5e6d2deff19de827ac04db106b900091c75b6a3c1a1c047a8aa78caf72a63ea"},
{file = "langsmith-0.0.66.tar.gz", hash = "sha256:33d011c9db9236c06789b17dba97acc023275bafd0c2bf097283730d6608dea7"},
{file = "langsmith-0.0.67-py3-none-any.whl", hash = "sha256:66a257b97dabd43a7e62af271b2ddb7566167ce4e446fd7b7760e97d6ce84a5e"},
{file = "langsmith-0.0.67.tar.gz", hash = "sha256:cef00bac2e7455a5943f3afaea91c032db1a1f2adb83003159a71e884fb5a9a2"},
]
[package.dependencies]
@ -3649,12 +3668,12 @@ regex = ["regex"]
[[package]]
name = "llama-cpp-python"
version = "0.2.19"
version = "0.2.20"
description = "Python bindings for the llama.cpp library"
optional = true
python-versions = ">=3.8"
files = [
{file = "llama_cpp_python-0.2.19.tar.gz", hash = "sha256:5c3be3f98108b7fc747f5c7260344af13621cd626d628cd5a6c0f6eec53a873a"},
{file = "llama_cpp_python-0.2.20.tar.gz", hash = "sha256:a0ada1cb800ba4da60ea6ac4f7264b687a35412374e5af2c92e5b22852cdbafb"},
]
[package.dependencies]
@ -3670,13 +3689,13 @@ test = ["httpx (>=0.24.1)", "pytest (>=7.4.0)"]
[[package]]
name = "locust"
version = "2.19.0"
version = "2.19.1"
description = "Developer friendly load testing framework"
optional = false
python-versions = ">=3.8"
files = [
{file = "locust-2.19.0-py3-none-any.whl", hash = "sha256:fc245bcdc3ffccfcd31be1ce99f736fe3be69b514d396ef419e2898d86795c6d"},
{file = "locust-2.19.0.tar.gz", hash = "sha256:091bb8ee321a9a63e160d0f6de0a0819c62f18378e44986bc5d446d017875aa6"},
{file = "locust-2.19.1-py3-none-any.whl", hash = "sha256:152d6c9b1d8b842422a9ef352d13529d337d5c4e1e0c0a3e0827c1b5eafb903d"},
{file = "locust-2.19.1.tar.gz", hash = "sha256:5c21609b1395833dc0b4bb3ddaaf709a88f6e70950aefce86e11eb5944fe9217"},
]
[package.dependencies]
@ -7089,20 +7108,20 @@ files = [
[[package]]
name = "s3transfer"
version = "0.8.0"
version = "0.8.1"
description = "An Amazon S3 Transfer Manager"
optional = false
python-versions = ">= 3.7"
files = [
{file = "s3transfer-0.8.0-py3-none-any.whl", hash = "sha256:baa479dc2e63e5c2ed51611b4d46cdf0295e2070d8d0b86b22f335ee5b954986"},
{file = "s3transfer-0.8.0.tar.gz", hash = "sha256:e8d6bd52ffd99841e3a57b34370a54841f12d3aab072af862cdcc50955288002"},
{file = "s3transfer-0.8.1-py3-none-any.whl", hash = "sha256:d1c52af7bceca1650d0f27728b29bb4925184aead7b55bccacf893b79a108604"},
{file = "s3transfer-0.8.1.tar.gz", hash = "sha256:e6cafd5643fc7b44fddfba1e5b521005675b0e07533ddad958a3554bc87d7330"},
]
[package.dependencies]
botocore = ">=1.32.7,<2.0a.0"
botocore = ">=1.33.2,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.32.7,<2.0a.0)"]
crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"]
[[package]]
name = "safetensors"
@ -8063,18 +8082,18 @@ telegram = ["requests"]
[[package]]
name = "traitlets"
version = "5.13.0"
version = "5.14.0"
description = "Traitlets Python configuration system"
optional = false
python-versions = ">=3.8"
files = [
{file = "traitlets-5.13.0-py3-none-any.whl", hash = "sha256:baf991e61542da48fe8aef8b779a9ea0aa38d8a54166ee250d5af5ecf4486619"},
{file = "traitlets-5.13.0.tar.gz", hash = "sha256:9b232b9430c8f57288c1024b34a8f0251ddcc47268927367a0dd3eeaca40deb5"},
{file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"},
{file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"},
]
[package.extras]
docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
test = ["argcomplete (>=3.0.3)", "mypy (>=1.6.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"]
[[package]]
name = "transformers"
@ -9160,4 +9179,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.11"
content-hash = "1930ee9350f8d29899117ba0ffce4eff58b10b99e4d135b8d4ba52bca9e50d88"
content-hash = "48794de9f7fe20b5a736fc0553485501d15ef611a0171a6c065d5a9f64798210"

View file

@ -79,7 +79,7 @@ psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
langchain-experimental = "*"
celery = { extras = ["redis"], version = "^5.3.1", optional = true }
celery = { extras = ["redis"], version = "^5.3.6", optional = true }
redis = { version = "^4.6.0", optional = true }
flower = { version = "^2.0.0", optional = true }
alembic = "^1.12.0"

View file

@ -47,12 +47,3 @@ def build_input_keys_response(langchain_object, artifacts):
input_keys_response["template"] = langchain_object.prompt.template
return input_keys_response
def get_new_key(dictionary, original_key):
    """Return ``original_key`` suffixed with the first ``" (n)"`` (n >= 1) not already in *dictionary*.

    Note that the suffix is always appended, even when ``original_key`` itself
    is absent from the dictionary.
    """
    counter = 1
    while f"{original_key} ({counter})" in dictionary:
        counter += 1
    return f"{original_key} ({counter})"

View file

@ -170,7 +170,7 @@ async def stream_build(
update_build_status(cache_service, flow_id, BuildStatus.FAILURE)
vertex_id = vertex.parent_node_id if vertex.parent_is_top_level else vertex.id
if vertex_id in graph.top_level_nodes:
if vertex_id in graph.top_level_vertices:
response = {
"valid": valid,
"params": params,

View file

@ -89,6 +89,7 @@ async def process(
if flow.data is None:
raise ValueError(f"Flow {flow_id} has no data")
graph_data = flow.data
task_result = None
if tweaks:
try:
graph_data = process_tweaks(graph_data, tweaks)
@ -160,6 +161,10 @@ async def get_task_status(task_id: str):
result = None
if task.ready():
result = task.result
# If result isinstance of Exception, can we get the traceback?
if isinstance(result, Exception):
logger.exception(task.traceback)
if isinstance(result, dict) and "result" in result:
result = result["result"]
elif hasattr(result, "result"):
@ -167,6 +172,10 @@ async def get_task_status(task_id: str):
if task is None:
raise HTTPException(status_code=404, detail="Task not found")
if task.status == "FAILURE":
result = str(task.result)
logger.error(f"Task {task_id} failed: {task.traceback}")
return TaskStatusResponse(status=task.status, result=result)
@ -237,3 +246,9 @@ async def custom_component_update(
component_node = build_langchain_template_custom_component(component, user_id=user.id, update_field=raw_code.field)
# Update the field
return component_node
# Update the field
return component_node
# Update the field
return component_node
# Update the field
return component_node

View file

@ -3,12 +3,16 @@ import os
langflow_redis_host = os.environ.get("LANGFLOW_REDIS_HOST")
langflow_redis_port = os.environ.get("LANGFLOW_REDIS_PORT")
if "BROKER_URL" in os.environ and "RESULT_BACKEND" in os.environ:
# RabbitMQ
broker_url = os.environ.get("BROKER_URL", "amqp://localhost")
result_backend = os.environ.get("RESULT_BACKEND", "redis://localhost:6379/0")
elif langflow_redis_host and langflow_redis_port:
# broker default user
if langflow_redis_host and langflow_redis_port:
broker_url = f"redis://{langflow_redis_host}:{langflow_redis_port}/0"
result_backend = f"redis://{langflow_redis_host}:{langflow_redis_port}/0"
else:
# RabbitMQ
mq_user = os.environ.get("RABBITMQ_DEFAULT_USER", "langflow")
mq_password = os.environ.get("RABBITMQ_DEFAULT_PASS", "langflow")
broker_url = os.environ.get("BROKER_URL", f"amqp://{mq_user}:{mq_password}@localhost:5672//")
result_backend = os.environ.get("RESULT_BACKEND", "redis://localhost:6379/0")
# tasks should be json or pickle
accept_content = ["json", "pickle"]

View file

@ -22,8 +22,8 @@ class TargetHandle(BaseModel):
class Edge:
def __init__(self, source: "Vertex", target: "Vertex", edge: dict):
self.source_id: str = source.id
self.target_id: str = target.id
self.source_id: str = source.id if source else ""
self.target_id: str = target.id if target else ""
if data := edge.get("data", {}):
self._source_handle = data.get("sourceHandle", {})
self._target_handle = data.get("targetHandle", {})

View file

@ -1,8 +1,6 @@
from typing import Dict, Generator, List, Type, Union
from langchain.chains.base import Chain
from loguru import logger
from langflow.graph.edge.base import Edge
from langflow.graph.graph.constants import lazy_load_vertex_dict
from langflow.graph.graph.utils import process_flow
@ -10,6 +8,7 @@ from langflow.graph.vertex.base import Vertex
from langflow.graph.vertex.types import FileToolVertex, LLMVertex, ToolkitVertex
from langflow.interface.tools.constants import FILE_TOOLS
from langflow.utils import payload
from loguru import logger
class Graph:
@ -71,7 +70,7 @@ class Graph:
def _build_graph(self) -> None:
"""Builds the graph from the vertices and edges."""
self.vertices = self._build_vertices()
self.vertex_ids = [vertex.id for vertex in self.vertices]
self.vertex_map = {vertex.id: vertex for vertex in self.vertices}
self.edges = self._build_edges()
# This is a hack to make sure that the LLM vertex is sent to
@ -108,7 +107,7 @@ class Graph:
def get_vertex(self, vertex_id: str) -> Union[None, Vertex]:
"""Returns a vertex by id."""
return next((vertex for vertex in self.vertices if vertex.id == vertex_id), None)
return self.vertex_map.get(vertex_id)
def get_vertex_edges(self, vertex_id: str) -> List[Edge]:
"""Returns a list of edges for a given vertex."""
@ -154,8 +153,8 @@ class Graph:
if state[vertex] == 0:
state[vertex] = 1
for edge in vertex.edges:
if edge.source == vertex:
dfs(edge.target)
if edge.source_id == vertex.id:
dfs(self.get_vertex(edge.target_id))
state[vertex] = 2
sorted_vertices.append(vertex)
@ -250,3 +249,4 @@ class Graph:
vertex_ids = [vertex.id for vertex in self.vertices]
edges_repr = "\n".join([f"{edge.source_id} --> {edge.target_id}" for edge in self.edges])
return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}"
return f"Graph:\nNodes: {vertex_ids}\nConnections:\n{edges_repr}"

View file

@ -1,7 +1,7 @@
import ast
import inspect
import types
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from typing import TYPE_CHECKING, Any, Coroutine, Dict, List, Optional
from langflow.graph.utils import UnbuiltObject
from langflow.interface.initialize import loading
@ -73,8 +73,8 @@ class Vertex:
self.parent_node_id = state["parent_node_id"]
self.parent_is_top_level = state["parent_is_top_level"]
def set_top_level(self, top_level_nodes: List[str]) -> None:
self.parent_is_top_level = self.parent_node_id in top_level_nodes
def set_top_level(self, top_level_vertices: List[str]) -> None:
self.parent_is_top_level = self.parent_node_id in top_level_vertices
def _parse_data(self) -> None:
self.data = self._data["data"]
@ -245,7 +245,10 @@ class Vertex:
if self.is_task and self.task_id is not None:
task = self.get_task()
result = task.get(timeout=timeout)
if isinstance(result, Coroutine):
result = await result
if result is not None: # If result is ready
self._update_built_object_and_artifacts(result)
return self._built_object

View file

@ -26,7 +26,7 @@ class AgentVertex(Vertex):
def _set_tools_and_chains(self) -> None:
for edge in self.edges:
if not hasattr(edge, "source"):
if not hasattr(edge, "source_id"):
continue
source_node = self.graph.get_vertex(edge.source_id)
if isinstance(source_node, (ToolVertex, ToolkitVertex)):

View file

@ -8,7 +8,6 @@ from uuid import UUID
from cachetools import LRUCache, cached
from fastapi import HTTPException
from langflow.api.utils import get_new_key
from langflow.interface.agents.base import agent_creator
from langflow.interface.chains.base import chain_creator
from langflow.interface.custom.base import custom_component_creator
@ -416,8 +415,8 @@ def build_valid_menu(valid_components):
component_template = build_langchain_template_custom_component(component_extractor)
component_template["output_types"] = component_output_types
full_path = f"{menu_path}/{component.get('file')}"
component_template["full_path"] = full_path
# full_path = f"{menu_path}/{component.get('file')}"
# component_template["full_path"] = full_path
if len(component_output_types) == 1:
component_name = component_output_types[0]
else:
@ -476,15 +475,24 @@ def build_invalid_menu(invalid_components):
return invalid_menu
def get_new_key(dictionary, original_key):
    """Generate a collision-free variant of *original_key* for *dictionary*.

    Tries ``"<original_key> (1)"``, ``"<original_key> (2)"``, ... and returns
    the first candidate that is not already a key of the dictionary.
    """
    counter = 1
    while True:
        candidate = f"{original_key} ({counter})"
        if candidate not in dictionary:
            return candidate
        counter += 1
def merge_nested_dicts_with_renaming(dict1, dict2):
for key, value in dict2.items():
if key in dict1 and isinstance(value, dict) and isinstance(dict1.get(key), dict):
for sub_key, sub_value in value.items():
if sub_key in dict1[key]:
new_key = get_new_key(dict1[key], sub_key)
dict1[key][new_key] = sub_value
else:
dict1[key][sub_key] = sub_value
# if sub_key in dict1[key]:
# new_key = get_new_key(dict1[key], sub_key)
# dict1[key][new_key] = sub_value
# else:
dict1[key][sub_key] = sub_value
else:
dict1[key] = value
return dict1

View file

@ -1,7 +1,7 @@
import asyncio
import json
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union
from typing import Any, Coroutine, Dict, List, Optional, Tuple, Union
from langchain.chains.base import Chain
from langchain.schema import AgentAction, Document
@ -138,6 +138,8 @@ def generate_result(langchain_object: Union[Chain, VectorStore], inputs: dict):
result = langchain_object.dict()
else:
logger.warning(f"Unknown langchain_object type: {type(langchain_object)}")
if isinstance(langchain_object, Coroutine):
result = asyncio.run(langchain_object)
result = langchain_object
return result

View file

@ -1,24 +1,23 @@
import base64
import contextlib
import functools
import hashlib
import os
import tempfile
from collections import OrderedDict
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict
from platformdirs import user_cache_dir
from fastapi import UploadFile
from langflow.api.v1.schemas import BuildStatus
from langflow.services.database.models.base import orjson_dumps
from platformdirs import user_cache_dir
if TYPE_CHECKING:
pass
from langflow.api.v1.schemas import BuildStatus
CACHE: Dict[str, Any] = {}
CACHE_DIR = user_cache_dir("langflow", "langflow")
PREFIX = "langflow_cache"
def create_cache_folder(func):
def wrapper(*args, **kwargs):
@ -33,50 +32,6 @@ def create_cache_folder(func):
return wrapper
def memoize_dict(maxsize=128):
    """Decorator factory: memoize a function keyed on its first (dict) argument.

    The wrapped function's first positional argument is hashed via
    ``compute_dict_hash`` and combined with the function name and the keyword
    arguments to form the cache key.  The returned wrapper exposes:

    * ``wrapper.session_id`` — hash of the most recently seen first argument.
    * ``wrapper.clear_cache()`` — empty both the cache and the hash index.
    * ``wrapper.get_result_by_session_id(session_id)`` — fetch a cached result
      by that hash, or ``None`` if unknown.
    * ``wrapper.cache`` — the underlying ``OrderedDict``.

    NOTE(review): eviction is insertion-ordered (FIFO), not true LRU — a cache
    hit does not refresh an entry's position; confirm this is intended.
    """
    cache = OrderedDict()
    hash_to_key = {}  # Mapping from hash to cache key

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # args[0] must be a dict-like payload; its hash doubles as the session id.
            hashed = compute_dict_hash(args[0])
            key = (func.__name__, hashed, frozenset(kwargs.items()))
            if key not in cache:
                result = func(*args, **kwargs)
                cache[key] = result
                hash_to_key[hashed] = key  # Store the mapping
                if len(cache) > maxsize:
                    # Evict the oldest entry and its hash-index entry together.
                    oldest_key = next(iter(cache))
                    oldest_hash = oldest_key[1]
                    del cache[oldest_key]
                    del hash_to_key[oldest_hash]
            else:
                result = cache[key]
            wrapper.session_id = hashed  # Store hash in the wrapper
            return result

        def clear_cache():
            # Drop every cached result and the session-id index.
            cache.clear()
            hash_to_key.clear()

        def get_result_by_session_id(session_id):
            # session_id is the dict hash produced by compute_dict_hash.
            key = hash_to_key.get(session_id)
            return cache.get(key) if key is not None else None

        wrapper.clear_cache = clear_cache  # type: ignore
        wrapper.get_result_by_session_id = get_result_by_session_id  # type: ignore
        wrapper.hash = None
        wrapper.cache = cache  # type: ignore
        return wrapper

    return decorator
PREFIX = "langflow_cache"
@create_cache_folder
def clear_old_cache_files(max_cache_size: int = 3):
cache_dir = Path(tempfile.gettempdir()) / PREFIX
@ -90,14 +45,6 @@ def clear_old_cache_files(max_cache_size: int = 3):
os.remove(cache_file)
def compute_dict_hash(graph_data):
    """Return a SHA-256 hex digest that canonically identifies *graph_data*.

    The data is first passed through ``filter_json`` (strips volatile fields),
    then serialized with sorted keys so that logically-equal graphs always
    produce the same digest.
    """
    graph_data = filter_json(graph_data)
    cleaned_graph_json = orjson_dumps(graph_data, sort_keys=True)
    return hashlib.sha256(cleaned_graph_json.encode("utf-8")).hexdigest()
def filter_json(json_data):
filtered_data = json_data.copy()
@ -203,9 +150,11 @@ def save_uploaded_file(file: UploadFile, folder_name):
return file_path
def update_build_status(cache_service, flow_id: str, status: BuildStatus):
def update_build_status(cache_service, flow_id: str, status: "BuildStatus"):
cached_flow = cache_service[flow_id]
if cached_flow is None:
raise ValueError(f"Flow {flow_id} not found in cache")
cached_flow["status"] = status
cache_service[flow_id] = cached_flow
cached_flow["status"] = status
cache_service[flow_id] = cached_flow

View file

@ -2,8 +2,7 @@ from typing import TYPE_CHECKING
from langflow.interface.run import build_sorted_vertices
from langflow.services.base import Service
from langflow.services.cache.utils import compute_dict_hash
from langflow.services.session.utils import session_id_generator
from langflow.services.session.utils import compute_dict_hash, session_id_generator
if TYPE_CHECKING:
from langflow.services.cache.base import BaseCacheService

View file

@ -1,6 +1,18 @@
import hashlib
import random
import string
from langflow.services.cache.utils import filter_json
from langflow.services.database.models.base import orjson_dumps
def session_id_generator(size=6):
    """Return a cryptographically random session id of *size* uppercase letters/digits."""
    alphabet = string.ascii_uppercase + string.digits
    rng = random.SystemRandom()
    return "".join(rng.choice(alphabet) for _ in range(size))
def compute_dict_hash(graph_data):
    """Return the SHA-256 hex digest of the filtered, canonically serialized graph data."""
    cleaned = orjson_dumps(filter_json(graph_data), sort_keys=True)
    return hashlib.sha256(cleaned.encode("utf-8")).hexdigest()

View file

@ -1,10 +1,11 @@
from typing import Any, Callable, Coroutine, Union
from langflow.utils.logger import configure
from loguru import logger
from langflow.services.base import Service
from langflow.services.task.backends.anyio import AnyIOBackend
from langflow.services.task.backends.base import TaskBackend
from langflow.services.task.utils import get_celery_worker_status
from langflow.utils.logger import configure
from loguru import logger
def check_celery_availability():
@ -60,7 +61,11 @@ class TaskService(Service):
if not hasattr(task_func, "apply"):
raise ValueError(f"Task function {task_func} does not have an apply method")
task = task_func.apply(args=args, kwargs=kwargs)
result = task.get()
# if result is coroutine
if isinstance(result, Coroutine):
result = await result
return task.id, result
async def launch_task(self, task_func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
@ -71,3 +76,6 @@ class TaskService(Service):
def get_task(self, task_id: Union[int, str]) -> Any:
return self.backend.get_task(task_id)
def get_task(self, task_id: Union[int, str]) -> Any:
return self.backend.get_task(task_id)

View file

@ -235,6 +235,12 @@ def build_class_constructor(compiled_class, exec_globals, class_name):
:param class_name: Name of the class
:return: Constructor function for the class
"""
# Add basic imports from typing module
# List, Dict, Tuple, Union, Optional
# to the global scope
for name in ["List", "Dict", "Tuple", "Union", "Optional"]:
exec_globals[name] = getattr(importlib.import_module("typing"), name)
exec(compiled_class, exec_globals, locals())
exec_globals[class_name] = locals()[class_name]

View file

@ -6,6 +6,8 @@ from langflow.core.celery_app import celery_app
from langflow.processing.process import Result, generate_result, process_inputs
from langflow.services.deps import get_session_service
from langflow.services.manager import initialize_session_service
from loguru import logger
from rich import print
if TYPE_CHECKING:
from langflow.graph.vertex.base import Vertex
@ -36,19 +38,38 @@ def process_graph_cached_task(
clear_cache=False,
session_id=None,
) -> Dict[str, Any]:
initialize_session_service()
session_service = get_session_service()
if clear_cache:
session_service.clear_session(session_id)
if session_id is None:
session_id = session_service.generate_key(session_id=session_id, data_graph=data_graph)
# Load the graph using SessionService
graph, artifacts = async_to_sync(session_service.load_session)(session_id, data_graph)
built_object = graph.build()
processed_inputs = process_inputs(inputs, artifacts)
result = generate_result(built_object, processed_inputs)
# langchain_object is now updated with the new memory
# we need to update the cache with the updated langchain_object
session_service.update_session(session_id, (graph, artifacts))
try:
initialize_session_service()
session_service = get_session_service()
return Result(result=result, session_id=session_id).model_dump()
if clear_cache:
session_service.clear_session(session_id)
if session_id is None:
session_id = session_service.generate_key(session_id=session_id, data_graph=data_graph)
# Use async_to_sync to handle the asynchronous part of the session service
session_data = async_to_sync(session_service.load_session, force_new_loop=True)(session_id, data_graph)
logger.warning(f"session_data: {session_data}")
graph, artifacts = session_data if session_data else (None, None)
if not graph:
raise ValueError("Graph not found in the session")
# Use async_to_sync for the asynchronous build method
built_object = async_to_sync(graph.build, force_new_loop=True)()
logger.debug(f"Built object: {built_object}")
processed_inputs = process_inputs(inputs, artifacts or {})
result = generate_result(built_object, processed_inputs)
# Update the session with the new data
session_service.update_session(session_id, (graph, artifacts))
result_object = Result(result=result, session_id=session_id).model_dump()
print(f"Result object: {result_object}")
return result_object
except Exception as e:
logger.error(f"Error in process_graph_cached_task: {e}")
# Handle the exception as needed, maybe re-raise or return an error message
raise

View file

@ -16,7 +16,7 @@ const buttonVariants = cva(
primary:
"border bg-background text-secondary-foreground hover:bg-background/80 dark:hover:bg-background/10 hover:shadow-sm",
secondary:
"border border-muted bg-muted text-secondary-foreground hover:bg-secondary/80",
"border border-muted bg-muted text-muted-foreground hover:bg-secondary/80",
ghost: "hover:bg-accent hover:text-accent-foreground",
link: "underline-offset-4 hover:underline text-primary",
},

View file

@ -308,7 +308,7 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
let text = await file.text();
let fileData = JSON.parse(text);
if (
(fileData.is_component === undefined && isComponent === true) ||
(!fileData.is_component && isComponent === true) ||
(fileData.is_component !== undefined &&
fileData.is_component !== isComponent)
) {
@ -339,11 +339,10 @@ export function FlowsProvider({ children }: { children: ReactNode }) {
let fileData: FlowType = await JSON.parse(text);
console.log(isComponent, fileData);
if (fileData.is_component === undefined) {
reject("Your file doesn't have the is_component property.");
} else if (
fileData.is_component !== undefined &&
fileData.is_component !== isComponent
if (
(!fileData.is_component && isComponent === true) ||
(fileData.is_component !== undefined &&
fileData.is_component !== isComponent)
) {
reject("You cannot upload a component as a flow or vice versa");
} else {

View file

@ -1,5 +1,6 @@
import { InfinityIcon } from "lucide-react";
import { InfinityIcon, Terminal, Code } from "lucide-react";
import { forwardRef } from "react";
import ForwardedIconComponent from "../../components/genericIconComponent";
export const GradientInfinity = forwardRef<
SVGSVGElement,
@ -15,7 +16,52 @@ export const GradientInfinity = forwardRef<
</linearGradient>
</defs>
</svg>
<InfinityIcon stroke="url(#grad1)" ref={ref} {...props} />
<Code stroke="url(#grad1)" ref={ref} {...props} />
</>
);
});
// Save icon drawn with the app's gradient stroke.  A zero-size, absolutely
// positioned SVG defines the linearGradient ("grad2"); the icon then
// references it through its stroke URL.  Ref is forwarded to the inner icon.
export const GradientSave = forwardRef<
  SVGSVGElement,
  React.PropsWithChildren<{}>
>((props, ref) => {
  return (
    <>
      {/* Invisible SVG that only hosts the gradient definition used below. */}
      <svg width="0" height="0" style={{ position: "absolute" }}>
        <defs>
          <linearGradient id="grad2" x1="0%" y1="0%" x2="100%" y2="0%">
            <stop className="gradient-start" offset="0%" />
            <stop className="gradient-end" offset="100%" />
          </linearGradient>
        </defs>
      </svg>
      <ForwardedIconComponent
        name="Save"
        stroke="url(#grad2)"
        ref={ref}
        {...props}
      />
    </>
  );
});
// "Combine" (group) icon drawn with the app's gradient stroke via the
// inline linearGradient definition "grad3".
// NOTE(review): unlike the sibling Gradient* components this one is a plain
// function component and does not forward a ref — confirm whether callers
// need ref access before relying on it.
export const GradientGroup = (props) => {
  return (
    <>
      {/* Invisible SVG that only hosts the gradient definition used below. */}
      <svg width="0" height="0" style={{ position: "absolute" }}>
        <defs>
          <linearGradient id="grad3" x1="0%" y1="0%" x2="100%" y2="0%">
            <stop className="gradient-start" offset="0%" />
            <stop className="gradient-end" offset="100%" />
          </linearGradient>
        </defs>
      </svg>
      <ForwardedIconComponent
        name="Combine"
        stroke="url(#grad3)"
        {...props}
      />
    </>
  );
};

View file

@ -29,6 +29,7 @@ function ConfirmationModal({
cancelText,
confirmationText,
children,
destructive = false,
icon,
data,
index,
@ -56,9 +57,9 @@ function ConfirmationModal({
);
return (
<BaseModal size={size ?? "x-small"} open={modalOpen} setOpen={setModalOpen}>
<BaseModal size={size} open={modalOpen} setOpen={setModalOpen}>
<BaseModal.Trigger asChild={asChild}>{triggerChild}</BaseModal.Trigger>
<BaseModal.Header description={titleHeader}>
<BaseModal.Header description={titleHeader ?? null}>
<span className="pr-2">{title}</span>
<Icon
name="icon"
@ -67,7 +68,7 @@ function ConfirmationModal({
/>
</BaseModal.Header>
<BaseModal.Content>
{modalContentTitle != "" && (
{modalContentTitle && modalContentTitle != "" && (
<>
<strong>{modalContentTitle}</strong>
<br></br>
@ -78,7 +79,8 @@ function ConfirmationModal({
<BaseModal.Footer>
<Button
className="ml-3 mt-5"
className="ml-3"
variant={destructive ? "destructive" : "default"}
onClick={() => {
setModalOpen(false);
onConfirm(index, data);
@ -88,7 +90,7 @@ function ConfirmationModal({
</Button>
<Button
className="mt-5"
className=""
variant="outline"
onClick={() => {
if (onCancel) onCancel();

View file

@ -41,7 +41,7 @@ export default function DeleteConfirmationModal({
</span>
<DialogFooter>
<DialogClose>
<Button className="mr-3">Cancel</Button>
<Button className="mr-3" variant="outline">Cancel</Button>
<Button
type="submit"

View file

@ -99,7 +99,7 @@ function BaseModal({
switch (size) {
case "x-small":
minWidth = "min-w-[20vw]";
height = "h-[10vh]";
height = " ";
break;
case "smaller":
minWidth = "min-w-[40vw]";
@ -150,7 +150,7 @@ function BaseModal({
<div className="truncate-doubleline word-break-break-word">
{headerChild}
</div>
<div className={`mt-2 flex flex-col ${height!} w-full `}>
<div className={`flex flex-col ${height!} w-full `}>
{ContentChild}
</div>
{ContentFooter && (

View file

@ -1,6 +1,7 @@
import { useEffect, useState } from "react";
import { NodeToolbar } from "reactflow";
import IconComponent from "../../../../components/genericIconComponent";
import { GradientGroup } from "../../../../icons/GradientSparkles";
export default function SelectionMenu({ onClick, nodes, isVisible }) {
const [isOpen, setIsOpen] = useState(false);
const [isTransitioning, setIsTransitioning] = useState(false);
@ -37,15 +38,15 @@ export default function SelectionMenu({ onClick, nodes, isVisible }) {
<div className="h-10 w-28 overflow-hidden">
<div
className={
"h-10 w-24 rounded-md border border-indigo-300 bg-white px-2.5 text-gray-700 shadow-inner transition-all duration-500 ease-in-out dark:bg-gray-800 dark:text-gray-300" +
(isTransitioning ? " translate-y-0" : " translate-y-10")
"h-10 w-24 rounded-md border border-indigo-300 bg-white px-2.5 text-gray-700 shadow-inner transition-all duration-400 ease-in-out dark:bg-gray-800 dark:text-gray-300" +
(isTransitioning ? " opacity-100" : " opacity-0 ")
}
>
<button
className="flex h-full w-full items-center justify-between text-sm hover:text-indigo-500"
onClick={onClick}
>
<IconComponent name="Group" className="w-6" />
<GradientGroup strokeWidth={1.5} size={22} className="text-primary" />
Group
</button>
</div>

View file

@ -343,7 +343,11 @@ export default function ExtraSidebar(): JSX.Element {
return -1;
} else if (b.toLowerCase() === "saved_components") {
return 1;
} else {
} else if (a.toLowerCase() === "custom_components") {
return -2
} else if (b.toLowerCase() === "custom_components") {
return 2
}else {
return a.localeCompare(b);
}
})

View file

@ -277,11 +277,10 @@ export default function NodeToolbarComponent({
<ConfirmationModal
asChild
open={showOverrideModal}
title={`Replace ${data.node?.display_name}`}
titleHeader={`Please, confirm your save actions`}
modalContentTitle="Attention!"
cancelText="New"
title={`Replace`}
cancelText="Create New"
confirmationText="Replace"
size={"x-small"}
icon={"SaveAll"}
index={6}
onConfirm={(index, user) => {
@ -292,8 +291,7 @@ export default function NodeToolbarComponent({
>
<ConfirmationModal.Content>
<span>
It seems {data.node?.display_name} already exists. Replacing it
will switch the current component. Proceed with replacement?
It seems {data.node?.display_name} already exists. Do you want to replace it with the current or create a new one?
</span>
</ConfirmationModal.Content>
<ConfirmationModal.Trigger>

View file

@ -95,47 +95,49 @@ export default function ComponentsComponent({
>
<div className="flex h-full w-full flex-col justify-between">
<div className="flex w-full flex-col gap-4">
<div className="grid w-full gap-4 md:grid-cols-2 lg:grid-cols-2">
{!isLoading || data?.length > 0 ? (
data?.map((item, idx) => (
<CollectionCardComponent
onDelete={() => {
removeFlow(item.id);
}}
key={idx}
data={item}
disabled={isLoading}
button={
!is_component ? (
<Button
variant="outline"
size="sm"
className="whitespace-nowrap "
onClick={() => {
navigate("/flow/" + item.id);
}}
>
<IconComponent
name="ExternalLink"
className="main-page-nav-button"
/>
Edit Flow
</Button>
) : (
<></>
)
}
/>
))
) : !isLoading && data?.length === 0 ? (
<>You haven't created {name}s yet.</>
) : (
<>
<SkeletonCardComponent />
<SkeletonCardComponent />
</>
)}
</div>
{!isLoading && data.length === 0 ? (
<div className="w-full text-center">You haven't created any {name}s yet.</div>
) : (
<div className="grid w-full gap-4 md:grid-cols-2 lg:grid-cols-2">
{!isLoading || data?.length > 0 ? (
data?.map((item, idx) => (
<CollectionCardComponent
onDelete={() => {
removeFlow(item.id);
}}
key={idx}
data={item}
disabled={isLoading}
button={
!is_component ? (
<Button
variant="outline"
size="sm"
className="whitespace-nowrap "
onClick={() => {
navigate("/flow/" + item.id);
}}
>
<IconComponent
name="ExternalLink"
className="main-page-nav-button"
/>
Edit Flow
</Button>
) : (
<></>
)
}
/>
))
) : (
<>
<SkeletonCardComponent />
<SkeletonCardComponent />
</>
)}
</div>
)}
</div>
{!isLoading && allData.length > 0 && (
<div className="relative py-6">

View file

@ -283,9 +283,10 @@ export type PaginatorComponentType = {
export type ConfirmationModalType = {
onCancel?: () => void;
title: string;
titleHeader: string;
titleHeader?: string;
asChild?: boolean;
modalContentTitle: string;
destructive?: boolean;
modalContentTitle?: string;
cancelText: string;
confirmationText: string;
children: [

View file

@ -99,6 +99,8 @@ import {
X,
XCircle,
Zap,
Combine,
TerminalIcon
} from "lucide-react";
import { FaApple, FaGithub } from "react-icons/fa";
import { AWSIcon } from "../icons/AWS";
@ -111,7 +113,7 @@ import { EvernoteIcon } from "../icons/Evernote";
import { FBIcon } from "../icons/FacebookMessenger";
import { GitBookIcon } from "../icons/GitBook";
import { GoogleIcon } from "../icons/Google";
import { GradientInfinity } from "../icons/GradientSparkles";
import { GradientInfinity, GradientSave } from "../icons/GradientSparkles";
import { HuggingFaceIcon } from "../icons/HuggingFace";
import { IFixIcon } from "../icons/IFixIt";
import { MetaIcon } from "../icons/Meta";
@ -260,7 +262,7 @@ export const nodeIconsLucide: iconsType = {
advanced: Laptop2,
chat: MessageCircle,
embeddings: Fingerprint,
saved_components: Save,
saved_components: GradientSave,
documentloaders: Paperclip,
vectorstores: Layers,
toolkits: Hammer,
@ -356,4 +358,6 @@ export const nodeIconsLucide: iconsType = {
Link,
ToyBrick,
RefreshCcw,
Combine,
TerminalIcon,
};

View file

@ -1,7 +1,7 @@
import json
from langflow.graph import Graph
import pytest
from langflow.graph import Graph
def get_graph(_type="basic"):
@ -41,5 +41,5 @@ def langchain_objects_are_equal(obj1, obj2):
def test_build_graph(client, basic_data_graph):
graph = Graph.from_payload(basic_data_graph)
assert graph is not None
assert len(graph.vertices) == len(basic_data_graph["nodes"])
assert len(graph.nodes) == len(basic_data_graph["nodes"])
assert len(graph.edges) == len(basic_data_graph["edges"])

View file

@ -1,18 +1,17 @@
from collections import namedtuple
import time
import uuid
from langflow.processing.process import Result
from langflow.services.auth.utils import get_password_hash
from langflow.services.database.models.api_key.model import ApiKey
from langflow.services.deps import get_settings_service
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service
from collections import namedtuple
import pytest
from fastapi.testclient import TestClient
from langflow.interface.tools.constants import CUSTOM_TOOLS
from langflow.processing.process import Result
from langflow.services.auth.utils import get_password_hash
from langflow.services.database.models.api_key.model import ApiKey
from langflow.services.database.utils import session_getter
from langflow.services.deps import get_db_service, get_settings_service
from langflow.template.frontend_node.chains import TimeTravelGuideChainNode
import time
def run_post(client, flow_id, headers, post_data):
response = client.post(
@ -25,7 +24,7 @@ def run_post(client, flow_id, headers, post_data):
# Helper function to poll task status
def poll_task_status(client, headers, href, max_attempts=20, sleep_time=1):
def poll_task_status(client, headers, href, max_attempts=20, sleep_time=2):
for _ in range(max_attempts):
task_status_response = client.get(
href,

View file

@ -81,8 +81,8 @@ def test_graph_structure(basic_graph):
assert isinstance(node, Vertex)
for edge in basic_graph.edges:
assert isinstance(edge, Edge)
assert edge.source_id in basic_graph.vertex_ids
assert edge.target_id in basic_graph.vertex_ids
assert edge.source_id in basic_graph.vertex_map.keys()
assert edge.target_id in basic_graph.vertex_map.keys()
def test_circular_dependencies(basic_graph):

View file

@ -1,9 +1,10 @@
import json
import pytest
from langchain.chains.base import Chain
from langflow.processing.process import load_flow_from_json
from langflow.graph import Graph
from langflow.utils.payload import get_root_node
from langflow.processing.process import load_flow_from_json
from langflow.utils.payload import get_root_vertex
def test_load_flow_from_json():
@ -22,14 +23,15 @@ def test_load_flow_from_json_with_tweaks():
assert loaded.llm.model_name == "test model"
def test_get_root_node():
def test_get_root_vertex():
with open(pytest.BASIC_EXAMPLE_PATH, "r") as f:
flow_graph = json.load(f)
data_graph = flow_graph["data"]
nodes = data_graph["nodes"]
edges = data_graph["edges"]
graph = Graph(nodes, edges)
root = get_root_node(graph)
root = get_root_vertex(graph)
assert root is not None
assert hasattr(root, "id")
assert hasattr(root, "data")
assert hasattr(root, "data")